/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge; so, at each label, we forget all that is
   known and start fresh.  This can be described as processing each
   basic block separately.  Note, however, that these are not quite
   the same as the basic blocks found by a later pass and used for
   data flow analysis and register packing.  We do not need to start fresh
   after a conditional jump instruction if there is no label there.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.
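
   For illustration (with hypothetical numbers, assuming max_reg == 100):
   scanning (set (reg 68) (plus (reg 66) (reg 67))) allocates a fresh
   quantity, say reg_qty[68] = 105, whereas a plain copy
   (set (reg 69) (reg 68)) merely propagates it, so reg_qty[69] = 105 too.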

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   for both registers, in the quantity's `qty_mode', must be in the hash
   table and must be in the same equivalence class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity's mode for one
   of the registers differs from the mode of those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
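
   For example (hypothetical pseudos): after (set (reg:SI 66) (const_int 4))
   the constant is entered with mode SImode, while after
   (set (reg:DI 67) (const_int 4)) the same rtx is entered again,
   separately, with mode DImode.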

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.
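
     For example, a store into (mem:SI (plus frame_pointer_rtx
     (const_int -8))) need only invalidate entries for fixed addresses
     that overlap those bytes (assuming 4-byte SImode), such as
     (mem:HI (plus frame_pointer_rtx (const_int -6))), while a store
     through an arbitrary pointer register invalidates every expression
     that refers to memory.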

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes any stale entries to be ignored when matching.
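
   As a worked example (hypothetical pseudo): an expression mentioning
   register 70 is entered while reg_tick[70] == 2, so reg_in_table[70]
   becomes 2.  A later store into register 70 bumps reg_tick[70] to 3;
   the stale entry is then silently ignored, and is actually removed
   only if some new expression mentioning register 70 is about to be
   entered.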

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
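
/* For example, entering (const (plus (symbol_ref "x") (const_int 4)))
   also enters the related expression (symbol_ref "x"), and the two table
   elements are linked through their `related_value' fields, so a register
   equivalent to either one can be found from the other.  */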

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the quantity number
   of the register's current contents.  */

static int *reg_qty;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

/* Indexed by register number, gives the number of times
   that register has been altered in the current basic block.  */

static int *reg_tick;

/* Indexed by register number, gives the reg_tick value at which
   rtx's containing this register are valid in the hash table.
   If this does not equal the current reg_tick value, such expressions
   existing in the hash table are invalid.
   If this is -1, no expressions containing this register have been
   entered in the table.  */

static int *reg_in_table;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* Two vectors of ints: one of max_reg elements, each holding -1; the
   other of max_reg + 500 elements (an approximation of max_qty), where
   element i holds i.
   These are used to initialize various other vectors fast.  */

static int *all_minus_one;
static int *consec_ints;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) reg_qty[REGNO (X)]) % NBUCKETS	\
  : canon_hash (X, M) % NBUCKETS)
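
/* Note that for pseudo registers the bucket is derived from
   reg_qty[REGNO (X)] rather than from the register number itself, so
   two pseudos currently known to hold the same quantity hash to the
   same bucket; this matches the canonicalization described in the
   overview above.  */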

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])	  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)								\
  (GET_CODE (X) == REG							\
   ? (CHEAP_REG (X) ? 0							\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1				\
      : 2)								\
   : notreg_cost (X))

/* Determine if the quantity number for register N represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (reg_qty[N] != (N))
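
/* For example, right after new_basic_block every register N has
   reg_qty[N] == N, so REGNO_QTY_VALID_P (N) is false for all registers
   until make_new_qty or make_regs_eqv assigns N a real quantity
   (>= max_reg).  */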

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX)					\
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0)))	\
   ? -1 : ADDRESS_COST(RTX))
#endif

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == arg_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
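
/* For example, FIXED_BASE_PLUS_P accepts frame_pointer_rtx itself and
   forms like (plus frame_pointer_rtx (const_int 8)), but not
   (plus frame_pointer_rtx (reg 66)), since the offset must be a
   CONST_INT.  */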

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PROTO((rtx));
static void new_basic_block	PROTO((void));
static void make_new_qty	PROTO((int));
static void make_regs_eqv	PROTO((int, int));
static void delete_reg_equiv	PROTO((int));
static int mention_regs		PROTO((rtx));
static int insert_regs		PROTO((rtx, struct table_elt *, int));
static void free_element	PROTO((struct table_elt *));
static void remove_from_table	PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup	PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate		PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p	PROTO((rtx));
static void remove_invalid_refs	PROTO((int));
static void rehash_using_reg	PROTO((rtx));
static void invalidate_memory	PROTO((void));
static void invalidate_for_call	PROTO((void));
static rtx use_related_value	PROTO((rtx, struct table_elt *));
static unsigned canon_hash	PROTO((rtx, enum machine_mode));
static unsigned safe_hash	PROTO((rtx, enum machine_mode));
static int exp_equiv_p		PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p		PROTO((rtx, rtx));
static rtx canon_reg		PROTO((rtx, rtx));
static void find_best_addr	PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx fold_rtx		PROTO((rtx, rtx));
static rtx equiv_constant	PROTO((rtx));
static void record_jump_equiv	PROTO((rtx, int));
static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx, int));
static void cse_insn		PROTO((rtx, rtx));
static int note_mem_written	PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes	PROTO((rtx, rtx));
static void cse_around_loop	PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PROTO((rtx, int *, rtx, int));

extern int rtx_equal_function_value_matters;

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
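
/* Thus COSTS_N_INSNS (1) == 2, the same as the default cost assigned
   to most codes in rtx_cost below, while for example MULT is charged
   COSTS_N_INSNS (5) == 18 and DIV COSTS_N_INSNS (7) == 26.  */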

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
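
      /* Note: on targets that define them, the RTX_COSTS and CONST_COSTS
	 macros below are expected to expand to `case' labels and return
	 statements for this switch, which is why they can follow a
	 return.  */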
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  bzero ((char *) reg_tick, max_reg * sizeof (int));

  bcopy ((char *) all_minus_one, (char *) reg_in_table,
	 max_reg * sizeof (int));
  bcopy ((char *) consec_ints, (char *) reg_qty, max_reg * sizeof (int));
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = reg_qty[reg] = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = reg_qty[old];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  reg_qty[new] = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = reg_qty[reg];
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  reg_qty[reg] = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (reg_in_table[i] >= 0 && reg_in_table[i] != reg_tick[i])
	    remove_invalid_refs (i);

	  reg_in_table[i] = reg_tick[i];
	}

      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[reg_qty[regno]] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[reg_qty[regno]] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      mention_regs (SUBREG_REG (x));
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = get_element ();
  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update `qty_const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[reg_qty[REGNO (classp->exp)]]
	= gen_lowpart_if_possible (qty_mode[reg_qty[REGNO (classp->exp)]], x);
      qty_const_insn[reg_qty[REGNO (classp->exp)]] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[reg_qty[REGNO (x)]]
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      qty_const[reg_qty[REGNO (x)]]
		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      qty_const_insn[reg_qty[REGNO (x)]] = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG && qty_const[reg_qty[REGNO (x)]]
	   && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]])
    qty_const_insn[reg_qty[REGNO (x)]] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) % NBUCKETS;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash_arg_in_struct = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	  new->in_struct = hash_arg_in_struct;
	}
    }
}
1519
1520/* Remove from the hash table, or mark as invalid,
1521   all expressions whose values could be altered by storing in X.
1522   X is a register, a subreg, or a memory reference with nonvarying address
1523   (because, when a memory reference with a varying address is stored in,
1524   all memory references are removed by invalidate_memory
1525   so specific invalidation is superfluous).
1526   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1527   instead of just the amount indicated by the mode of X.  This is only used
1528   for bitfield stores into memory.
1529
1530   A nonvarying address may be just a register or just
1531   a symbol reference, or it may be either of those plus
1532   a numeric offset.  */
1533
1534static void
1535invalidate (x, full_mode)
1536     rtx x;
1537     enum machine_mode full_mode;
1538{
1539  register int i;
1540  register struct table_elt *p;
1541
1542  /* If X is a register, dependencies on its contents
1543     are recorded through the qty number mechanism.
1544     Just change the qty number of the register,
1545     mark it as invalid for expressions that refer to it,
1546     and remove it itself.  */
1547
1548  if (GET_CODE (x) == REG)
1549    {
1550      register int regno = REGNO (x);
1551      register unsigned hash = HASH (x, GET_MODE (x));
1552
1553      /* Remove REGNO from any quantity list it might be on and indicate
1554	 that its value might have changed.  If it is a pseudo, remove its
1555	 entry from the hash table.
1556
1557	 For a hard register, we do the first two actions above for any
1558	 additional hard registers corresponding to X.  Then, if any of these
1559	 registers are in the table, we must remove any REG entries that
1560	 overlap these registers.  */
1561
1562      delete_reg_equiv (regno);
1563      reg_tick[regno]++;
1564
1565      if (regno >= FIRST_PSEUDO_REGISTER)
1566	{
1567	  /* Because a register can be referenced in more than one mode,
1568	     we might have to remove more than one table entry.  */
1569
1570	  struct table_elt *elt;
1571
1572	  while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1573	    remove_from_table (elt, hash);
1574	}
1575      else
1576	{
1577	  HOST_WIDE_INT in_table
1578	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1579	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1580	  int tregno, tendregno;
1581	  register struct table_elt *p, *next;
1582
1583	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1584
1585	  for (i = regno + 1; i < endregno; i++)
1586	    {
1587	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1588	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1589	      delete_reg_equiv (i);
1590	      reg_tick[i]++;
1591	    }
1592
1593	  if (in_table)
1594	    for (hash = 0; hash < NBUCKETS; hash++)
1595	      for (p = table[hash]; p; p = next)
1596		{
1597		  next = p->next_same_hash;
1598
1599		  if (GET_CODE (p->exp) != REG
1600		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1601		    continue;
1602
1603		  tregno = REGNO (p->exp);
1604		  tendregno
1605		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1606		  if (tendregno > regno && tregno < endregno)
1607		    remove_from_table (p, hash);
1608		}
1609	}
1610
1611      return;
1612    }
1613
1614  if (GET_CODE (x) == SUBREG)
1615    {
1616      if (GET_CODE (SUBREG_REG (x)) != REG)
1617	abort ();
1618      invalidate (SUBREG_REG (x), VOIDmode);
1619      return;
1620    }
1621
1622  /* If X is a parallel, invalidate all of its elements.  */
1623
1624  if (GET_CODE (x) == PARALLEL)
1625    {
1626      for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1627	invalidate (XVECEXP (x, 0, i), VOIDmode);
1628      return;
1629    }
1630
1631  /* If X is an expr_list, this is part of a disjoint return value;
1632     extract the location in question ignoring the offset.  */
1633
1634  if (GET_CODE (x) == EXPR_LIST)
1635    {
1636      invalidate (XEXP (x, 0), VOIDmode);
1637      return;
1638    }
1639
1640  /* X is not a register; it must be a memory reference with
1641     a nonvarying address.  Remove all hash table elements
1642     that refer to overlapping pieces of memory.  */
1643
1644  if (GET_CODE (x) != MEM)
1645    abort ();
1646
1647  if (full_mode == VOIDmode)
1648    full_mode = GET_MODE (x);
1649
1650  for (i = 0; i < NBUCKETS; i++)
1651    {
1652      register struct table_elt *next;
1653      for (p = table[i]; p; p = next)
1654	{
1655	  next = p->next_same_hash;
1656	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
1657	     than checking all the aliases).  */
1658	  if (p->in_memory
1659	      && (GET_CODE (p->exp) != MEM
1660		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1661	    remove_from_table (p, i);
1662	}
1663    }
1664}
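
/* Worked example (illustrative only): after

	(set (reg 66) (plus (reg 66) (const_int 1)))

   invalidate is called on (reg 66) with VOIDmode: the register's
   quantity link is deleted, reg_tick[66] is bumped so stale table
   entries no longer validate, and the REG's own entries are removed.
   A store through a MEM instead sweeps every bucket and removes each
   `in_memory' entry that true_dependence says might overlap.  */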
1665
1666/* Remove all expressions that refer to register REGNO,
1667   since they are already invalid, and we are about to
1668   mark that register valid again and don't want the old
1669   expressions to reappear as valid.  */
1670
1671static void
1672remove_invalid_refs (regno)
1673     int regno;
1674{
1675  register int i;
1676  register struct table_elt *p, *next;
1677
1678  for (i = 0; i < NBUCKETS; i++)
1679    for (p = table[i]; p; p = next)
1680      {
1681	next = p->next_same_hash;
1682	if (GET_CODE (p->exp) != REG
1683	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1684	  remove_from_table (p, i);
1685      }
1686}
1687
1688/* Recompute the hash codes of any valid entries in the hash table that
1689   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1690
1691   This is called when we make a jump equivalence.  */
1692
1693static void
1694rehash_using_reg (x)
1695     rtx x;
1696{
1697  int i;
1698  struct table_elt *p, *next;
1699  unsigned hash;
1700
1701  if (GET_CODE (x) == SUBREG)
1702    x = SUBREG_REG (x);
1703
1704  /* If X is not a register or if the register is known not to be in any
1705     valid entries in the table, we have no work to do.  */
1706
1707  if (GET_CODE (x) != REG
1708      || reg_in_table[REGNO (x)] < 0
1709      || reg_in_table[REGNO (x)] != reg_tick[REGNO (x)])
1710    return;
1711
1712  /* Scan all hash chains looking for valid entries that mention X.
1713     If we find one and it is in the wrong hash chain, move it.  We can skip
1714     objects that are registers, since they are handled specially.  */
1715
1716  for (i = 0; i < NBUCKETS; i++)
1717    for (p = table[i]; p; p = next)
1718      {
1719	next = p->next_same_hash;
1720	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1721	    && exp_equiv_p (p->exp, p->exp, 1, 0)
1722	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1723	  {
1724	    if (p->next_same_hash)
1725	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
1726
1727	    if (p->prev_same_hash)
1728	      p->prev_same_hash->next_same_hash = p->next_same_hash;
1729	    else
1730	      table[i] = p->next_same_hash;
1731
1732	    p->next_same_hash = table[hash];
1733	    p->prev_same_hash = 0;
1734	    if (table[hash])
1735	      table[hash]->prev_same_hash = p;
1736	    table[hash] = p;
1737	  }
1738      }
1739}
1740
1741/* Remove from the hash table any expressions that are call-clobbered
1742   registers.  Also update the TICK values of those registers.  */
1743
1744static void
1745invalidate_for_call ()
1746{
1747  int regno, endregno;
1748  int i;
1749  unsigned hash;
1750  struct table_elt *p, *next;
1751  int in_table = 0;
1752
1753  /* Go through all the hard registers.  For each that is clobbered in
1754     a CALL_INSN, remove the register from quantity chains and update
1755     reg_tick if defined.  Also see if any of these registers is currently
1756     in the table.  */
1757
1758  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1759    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1760      {
1761	delete_reg_equiv (regno);
1762	if (reg_tick[regno] >= 0)
1763	  reg_tick[regno]++;
1764
1765	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1766      }
1767
1768  /* In the case where we have no call-clobbered hard registers in the
1769     table, we are done.  Otherwise, scan the table and remove any
1770     entry that overlaps a call-clobbered register.  */
1771
1772  if (in_table)
1773    for (hash = 0; hash < NBUCKETS; hash++)
1774      for (p = table[hash]; p; p = next)
1775	{
1776	  next = p->next_same_hash;
1777
1778	  if (p->in_memory)
1779	    {
1780	      remove_from_table (p, hash);
1781	      continue;
1782	    }
1783
1784	  if (GET_CODE (p->exp) != REG
1785	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1786	    continue;
1787
1788	  regno = REGNO (p->exp);
1789	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1790
1791	  for (i = regno; i < endregno; i++)
1792	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1793	      {
1794		remove_from_table (p, hash);
1795		break;
1796	      }
1797	}
1798}
1799
1800/* Given an expression X of type CONST,
1801   and ELT which is its table entry (or 0 if it
1802   is not in the hash table),
1803   return an alternate expression for X as a register plus integer.
1804   If none can be found, return 0.  */
1805
1806static rtx
1807use_related_value (x, elt)
1808     rtx x;
1809     struct table_elt *elt;
1810{
1811  register struct table_elt *relt = 0;
1812  register struct table_elt *p, *q;
1813  HOST_WIDE_INT offset;
1814
1815  /* First, is there anything related known?
1816     If we have a table element, we can tell from that.
1817     Otherwise, we must look it up.  */
1818
1819  if (elt != 0 && elt->related_value != 0)
1820    relt = elt;
1821  else if (elt == 0 && GET_CODE (x) == CONST)
1822    {
1823      rtx subexp = get_related_value (x);
1824      if (subexp != 0)
1825	relt = lookup (subexp,
1826		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
1827		       GET_MODE (subexp));
1828    }
1829
1830  if (relt == 0)
1831    return 0;
1832
1833  /* Search all related table entries for one that has an
1834     equivalent register.  */
1835
1836  p = relt;
1837  while (1)
1838    {
1839      /* This loop is strange in that it is executed in two different cases.
1840	 The first is when X is already in the table.  Then it is searching
1841	 the RELATED_VALUE list of X's class (RELT).  The second case is when
1842	 X is not in the table.  Then RELT points to a class for the related
1843	 value.
1844
1845	 Ensure that, whatever case we are in, we ignore classes that have
1846	 the same value as X.  */
1847
1848      if (rtx_equal_p (x, p->exp))
1849	q = 0;
1850      else
1851	for (q = p->first_same_value; q; q = q->next_same_value)
1852	  if (GET_CODE (q->exp) == REG)
1853	    break;
1854
1855      if (q)
1856	break;
1857
1858      p = p->related_value;
1859
1860      /* We went all the way around, so there is nothing to be found.
1861	 Alternatively, perhaps RELT was in the table for some other reason
1862	 and it has no related values recorded.  */
1863      if (p == relt || p == 0)
1864	break;
1865    }
1866
1867  if (q == 0)
1868    return 0;
1869
1870  offset = (get_integer_term (x) - get_integer_term (p->exp));
1871	  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
1872  return plus_constant (q->exp, offset);
1873}
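
/* Worked example (illustrative only): if we want
   (const (plus (symbol_ref "tbl") (const_int 64))) and the related
   (const (plus (symbol_ref "tbl") (const_int 4))) is known to be in
   (reg 70), then OFFSET is 64 - 4 = 60 and the result is
   (plus (reg 70) (const_int 60)), replacing a constant load by an add.  */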
1874
1875/* Hash an rtx.  We are careful to make sure the value is never negative.
1876   Equivalent registers hash identically.
1877   MODE is used in hashing for CONST_INTs only;
1878   otherwise the mode of X is used.
1879
1880   Store 1 in do_not_record if any subexpression is volatile.
1881
1882   Store 1 in hash_arg_in_memory if X contains a MEM rtx
1883   which does not have the RTX_UNCHANGING_P bit set.
1884   In this case, also store 1 in hash_arg_in_struct
1885   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
1886
1887   Note that cse_insn knows that the hash code of a MEM expression
1888   is just (int) MEM plus the hash code of the address.  */
1889
1890static unsigned
1891canon_hash (x, mode)
1892     rtx x;
1893     enum machine_mode mode;
1894{
1895  register int i, j;
1896  register unsigned hash = 0;
1897  register enum rtx_code code;
1898  register char *fmt;
1899
1900  /* repeat is used to turn tail-recursion into iteration.  */
1901 repeat:
1902  if (x == 0)
1903    return hash;
1904
1905  code = GET_CODE (x);
1906  switch (code)
1907    {
1908    case REG:
1909      {
1910	register int regno = REGNO (x);
1911
1912	/* On some machines, we can't record any non-fixed hard register,
1913	   because extending its life will cause reload problems.  We
1914	   consider ap, fp, and sp to be fixed for this purpose.
1915	   On all machines, we can't record any global registers.  */
1916
1917	if (regno < FIRST_PSEUDO_REGISTER
1918	    && (global_regs[regno]
1919		|| (SMALL_REGISTER_CLASSES
1920		    && ! fixed_regs[regno]
1921		    && regno != FRAME_POINTER_REGNUM
1922		    && regno != HARD_FRAME_POINTER_REGNUM
1923		    && regno != ARG_POINTER_REGNUM
1924		    && regno != STACK_POINTER_REGNUM)))
1925	  {
1926	    do_not_record = 1;
1927	    return 0;
1928	  }
1929	hash += ((unsigned) REG << 7) + (unsigned) reg_qty[regno];
1930	return hash;
1931      }
1932
1933    case CONST_INT:
1934      {
1935	unsigned HOST_WIDE_INT tem = INTVAL (x);
1936	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
1937	return hash;
1938      }
1939
1940    case CONST_DOUBLE:
1941      /* This is like the general case, except that it only counts
1942	 the integers representing the constant.  */
1943      hash += (unsigned) code + (unsigned) GET_MODE (x);
1944      if (GET_MODE (x) != VOIDmode)
1945	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1946	  {
1947	    unsigned tem = XINT (x, i);
1948	    hash += tem;
1949	  }
1950      else
1951	hash += ((unsigned) CONST_DOUBLE_LOW (x)
1952		 + (unsigned) CONST_DOUBLE_HIGH (x));
1953      return hash;
1954
1955      /* Assume there is only one rtx object for any given label.  */
1956    case LABEL_REF:
1957      hash
1958	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
1959      return hash;
1960
1961    case SYMBOL_REF:
1962      hash
1963	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
1964      return hash;
1965
1966    case MEM:
1967      if (MEM_VOLATILE_P (x))
1968	{
1969	  do_not_record = 1;
1970	  return 0;
1971	}
1972      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
1973	{
1974	  hash_arg_in_memory = 1;
1975	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
1976	}
1977      /* Now that we have already found this special case,
1978	 might as well speed it up as much as possible.  */
1979      hash += (unsigned) MEM;
1980      x = XEXP (x, 0);
1981      goto repeat;
1982
1983    case PRE_DEC:
1984    case PRE_INC:
1985    case POST_DEC:
1986    case POST_INC:
1987    case PC:
1988    case CC0:
1989    case CALL:
1990    case UNSPEC_VOLATILE:
1991      do_not_record = 1;
1992      return 0;
1993
1994    case ASM_OPERANDS:
1995      if (MEM_VOLATILE_P (x))
1996	{
1997	  do_not_record = 1;
1998	  return 0;
1999	}
2000      break;
2001
2002    default:
2003      break;
2004    }
2005
2006  i = GET_RTX_LENGTH (code) - 1;
2007  hash += (unsigned) code + (unsigned) GET_MODE (x);
2008  fmt = GET_RTX_FORMAT (code);
2009  for (; i >= 0; i--)
2010    {
2011      if (fmt[i] == 'e')
2012	{
2013	  rtx tem = XEXP (x, i);
2014
2015	  /* If we are about to do the last recursive call
2016	     needed at this level, change it into iteration.
2017	     This function is called often enough to be worth it.  */
2018	  if (i == 0)
2019	    {
2020	      x = tem;
2021	      goto repeat;
2022	    }
2023	  hash += canon_hash (tem, 0);
2024	}
2025      else if (fmt[i] == 'E')
2026	for (j = 0; j < XVECLEN (x, i); j++)
2027	  hash += canon_hash (XVECEXP (x, i, j), 0);
2028      else if (fmt[i] == 's')
2029	{
2030	  register unsigned char *p = (unsigned char *) XSTR (x, i);
2031	  if (p)
2032	    while (*p)
2033	      hash += *p++;
2034	}
2035      else if (fmt[i] == 'i')
2036	{
2037	  register unsigned tem = XINT (x, i);
2038	  hash += tem;
2039	}
2040      else if (fmt[i] == '0')
2041	/* unused */;
2042      else
2043	abort ();
2044    }
2045  return hash;
2046}
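
/* For instance, (plus:SI (reg 66) (const_int 8)) hashes as PLUS plus
   SImode plus the hashes of the two operands; the REG operand
   contributes reg_qty[66] rather than the register number itself,
   which is why equivalent registers hash identically.  */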
2047
2048/* Like canon_hash but with no side effects.  */
2049
2050static unsigned
2051safe_hash (x, mode)
2052     rtx x;
2053     enum machine_mode mode;
2054{
2055  int save_do_not_record = do_not_record;
2056  int save_hash_arg_in_memory = hash_arg_in_memory;
2057  int save_hash_arg_in_struct = hash_arg_in_struct;
2058  unsigned hash = canon_hash (x, mode);
2059  hash_arg_in_memory = save_hash_arg_in_memory;
2060  hash_arg_in_struct = save_hash_arg_in_struct;
2061  do_not_record = save_do_not_record;
2062  return hash;
2063}
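
/* A minimal usage sketch (hypothetical helper, not part of this pass):
   safe_hash is the entry point for speculative probes, since it leaves
   do_not_record and the hash_arg_in_* flags exactly as it found them.  */

static struct table_elt *
example_probe (x)
     rtx x;
{
  /* Probe the table for X without perturbing any global state.  */
  return lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
}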
2064
2065/* Return 1 iff X and Y would canonicalize into the same thing,
2066   without actually constructing the canonicalization of either one.
2067   If VALIDATE is nonzero,
2068   we assume X is an expression being processed from the rtl
2069   and Y was found in the hash table.  We check register refs
2070   in Y for being marked as valid.
2071
2072   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2073   that is known to be in the register.  Ordinarily, we don't allow them
2074   to match, because letting them match would cause unpredictable results
2075   in all the places that search a hash table chain for an equivalent
2076   for a given value.  A possible equivalent that has different structure
2077   has its hash code computed from different data.  Whether the hash code
2078   is the same as that of the given value is pure luck.  */
2079
2080static int
2081exp_equiv_p (x, y, validate, equal_values)
2082     rtx x, y;
2083     int validate;
2084     int equal_values;
2085{
2086  register int i, j;
2087  register enum rtx_code code;
2088  register char *fmt;
2089
2090  /* Note: it is incorrect to assume an expression is equivalent to itself
2091     if VALIDATE is nonzero.  */
2092  if (x == y && !validate)
2093    return 1;
2094  if (x == 0 || y == 0)
2095    return x == y;
2096
2097  code = GET_CODE (x);
2098  if (code != GET_CODE (y))
2099    {
2100      if (!equal_values)
2101	return 0;
2102
2103      /* If X is a constant and Y is a register or vice versa, they may be
2104	 equivalent.  We only have to validate if Y is a register.  */
2105      if (CONSTANT_P (x) && GET_CODE (y) == REG
2106	  && REGNO_QTY_VALID_P (REGNO (y))
2107	  && GET_MODE (y) == qty_mode[reg_qty[REGNO (y)]]
2108	  && rtx_equal_p (x, qty_const[reg_qty[REGNO (y)]])
2109	  && (! validate || reg_in_table[REGNO (y)] == reg_tick[REGNO (y)]))
2110	return 1;
2111
2112      if (CONSTANT_P (y) && code == REG
2113	  && REGNO_QTY_VALID_P (REGNO (x))
2114	  && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2115	  && rtx_equal_p (y, qty_const[reg_qty[REGNO (x)]]))
2116	return 1;
2117
2118      return 0;
2119    }
2120
2121  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2122  if (GET_MODE (x) != GET_MODE (y))
2123    return 0;
2124
2125  switch (code)
2126    {
2127    case PC:
2128    case CC0:
2129      return x == y;
2130
2131    case CONST_INT:
2132      return INTVAL (x) == INTVAL (y);
2133
2134    case LABEL_REF:
2135      return XEXP (x, 0) == XEXP (y, 0);
2136
2137    case SYMBOL_REF:
2138      return XSTR (x, 0) == XSTR (y, 0);
2139
2140    case REG:
2141      {
2142	int regno = REGNO (y);
2143	int endregno
2144	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2145		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2146	int i;
2147
2148	/* If the quantities are not the same, the expressions are not
2149	   equivalent.  If they are and we are not to validate, they
2150	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2151
2152	if (reg_qty[REGNO (x)] != reg_qty[regno])
2153	  return 0;
2154
2155	if (! validate)
2156	  return 1;
2157
2158	for (i = regno; i < endregno; i++)
2159	  if (reg_in_table[i] != reg_tick[i])
2160	    return 0;
2161
2162	return 1;
2163      }
2164
2165    /*  For commutative operations, check both orders.  */
2166    case PLUS:
2167    case MULT:
2168    case AND:
2169    case IOR:
2170    case XOR:
2171    case NE:
2172    case EQ:
2173      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2174	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2175			       validate, equal_values))
2176	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2177			       validate, equal_values)
2178		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2179				  validate, equal_values)));
2180
2181    default:
2182      break;
2183    }
2184
2185  /* Compare the elements.  If any pair of corresponding elements
2186     fails to match, return 0 for the whole thing.  */
2187
2188  fmt = GET_RTX_FORMAT (code);
2189  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2190    {
2191      switch (fmt[i])
2192	{
2193	case 'e':
2194	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2195	    return 0;
2196	  break;
2197
2198	case 'E':
2199	  if (XVECLEN (x, i) != XVECLEN (y, i))
2200	    return 0;
2201	  for (j = 0; j < XVECLEN (x, i); j++)
2202	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2203			       validate, equal_values))
2204	      return 0;
2205	  break;
2206
2207	case 's':
2208	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2209	    return 0;
2210	  break;
2211
2212	case 'i':
2213	  if (XINT (x, i) != XINT (y, i))
2214	    return 0;
2215	  break;
2216
2217	case 'w':
2218	  if (XWINT (x, i) != XWINT (y, i))
2219	    return 0;
2220	  break;
2221
2222	case '0':
2223	  break;
2224
2225	default:
2226	  abort ();
2227	}
2228      }
2229
2230  return 1;
2231}
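
/* For example, (plus (reg 66) (const_int 1)) and
   (plus (const_int 1) (reg 66)) are judged equivalent because the PLUS
   case tries both operand orders; and with EQUAL_VALUES nonzero,
   (reg 70) matches (const_int 5) when qty_const shows the register is
   known to hold that constant.  */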
2232
2233/* Return 1 iff any subexpression of X matches Y.
2234   Here we do not require that X or Y be valid (for registers referred to)
2235   for being in the hash table.  */
2236
2237static int
2238refers_to_p (x, y)
2239     rtx x, y;
2240{
2241  register int i;
2242  register enum rtx_code code;
2243  register char *fmt;
2244
2245 repeat:
2246  if (x == y)
2247    return 1;
2248  if (x == 0 || y == 0)
2249    return 0;
2250
2251  code = GET_CODE (x);
2252  /* If X as a whole has the same code as Y, they may match.
2253     If so, return 1.  */
2254  if (code == GET_CODE (y))
2255    {
2256      if (exp_equiv_p (x, y, 0, 1))
2257	return 1;
2258    }
2259
2260  /* X does not match, so try its subexpressions.  */
2261
2262  fmt = GET_RTX_FORMAT (code);
2263  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2264    if (fmt[i] == 'e')
2265      {
2266	if (i == 0)
2267	  {
2268	    x = XEXP (x, 0);
2269	    goto repeat;
2270	  }
2271	else
2272	  if (refers_to_p (XEXP (x, i), y))
2273	    return 1;
2274      }
2275    else if (fmt[i] == 'E')
2276      {
2277	int j;
2278	for (j = 0; j < XVECLEN (x, i); j++)
2279	  if (refers_to_p (XVECEXP (x, i, j), y))
2280	    return 1;
2281      }
2282
2283  return 0;
2284}
2285
2286/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2287   set PBASE, PSTART, and PEND which correspond to the base of the address,
2288   the starting offset, and ending offset respectively.
2289
2290   ADDR is known to be a nonvarying address.  */
2291
2292/* ??? Despite what the comments say, this function is in fact frequently
2293   passed varying addresses.  This does not appear to cause any problems.  */
2294
2295static void
2296set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2297     rtx addr;
2298     int size;
2299     rtx *pbase;
2300     HOST_WIDE_INT *pstart, *pend;
2301{
2302  rtx base;
2303  HOST_WIDE_INT start, end;
2304
2305  base = addr;
2306  start = 0;
2307  end = 0;
2308
2309  if (flag_pic && GET_CODE (base) == PLUS
2310      && XEXP (base, 0) == pic_offset_table_rtx)
2311    base = XEXP (base, 1);
2312
2313  /* Registers with nonvarying addresses usually have constant equivalents;
2314     but the frame pointer register is also a possible base.  */
2315  if (GET_CODE (base) == REG
2316      && qty_const != 0
2317      && REGNO_QTY_VALID_P (REGNO (base))
2318      && qty_mode[reg_qty[REGNO (base)]] == GET_MODE (base)
2319      && qty_const[reg_qty[REGNO (base)]] != 0)
2320    base = qty_const[reg_qty[REGNO (base)]];
2321  else if (GET_CODE (base) == PLUS
2322	   && GET_CODE (XEXP (base, 1)) == CONST_INT
2323	   && GET_CODE (XEXP (base, 0)) == REG
2324	   && qty_const != 0
2325	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2326	   && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2327	       == GET_MODE (XEXP (base, 0)))
2328	   && qty_const[reg_qty[REGNO (XEXP (base, 0))]])
2329    {
2330      start = INTVAL (XEXP (base, 1));
2331      base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2332    }
2333  /* This can happen as the result of virtual register instantiation,
2334     if the initial offset is too large to be a valid address.  */
2335  else if (GET_CODE (base) == PLUS
2336	   && GET_CODE (XEXP (base, 0)) == REG
2337	   && GET_CODE (XEXP (base, 1)) == REG
2338	   && qty_const != 0
2339	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2340	   && (qty_mode[reg_qty[REGNO (XEXP (base, 0))]]
2341	       == GET_MODE (XEXP (base, 0)))
2342	   && qty_const[reg_qty[REGNO (XEXP (base, 0))]]
2343	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2344	   && (qty_mode[reg_qty[REGNO (XEXP (base, 1))]]
2345	       == GET_MODE (XEXP (base, 1)))
2346	   && qty_const[reg_qty[REGNO (XEXP (base, 1))]])
2347    {
2348      rtx tem = qty_const[reg_qty[REGNO (XEXP (base, 1))]];
2349      base = qty_const[reg_qty[REGNO (XEXP (base, 0))]];
2350
2351      /* One of the two values must be a constant.  */
2352      if (GET_CODE (base) != CONST_INT)
2353	{
2354	  if (GET_CODE (tem) != CONST_INT)
2355	    abort ();
2356	  start = INTVAL (tem);
2357	}
2358      else
2359	{
2360	  start = INTVAL (base);
2361	  base = tem;
2362	}
2363    }
2364
2365  /* Handle everything that we can find inside an address that has been
2366     viewed as constant.  */
2367
2368  while (1)
2369    {
2370      /* If no arm of this switch does a "continue", the break below
2371	 will exit this loop.  */
2372
2373      switch (GET_CODE (base))
2374	{
2375	case LO_SUM:
2376	  /* By definition, operand1 of a LO_SUM is the associated constant
2377	     address.  Use the associated constant address as the base
2378	     instead.  */
2379	  base = XEXP (base, 1);
2380	  continue;
2381
2382	case CONST:
2383	  /* Strip off CONST.  */
2384	  base = XEXP (base, 0);
2385	  continue;
2386
2387	case PLUS:
2388	  if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2389	    {
2390	      start += INTVAL (XEXP (base, 1));
2391	      base = XEXP (base, 0);
2392	      continue;
2393	    }
2394	  break;
2395
2396	case AND:
2397	  /* Handle the case of an AND which is the negative of a power of
2398	     two.  This is used to represent unaligned memory operations.  */
2399	  if (GET_CODE (XEXP (base, 1)) == CONST_INT
2400	      && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2401	    {
2402	      set_nonvarying_address_components (XEXP (base, 0), size,
2403						 pbase, pstart, pend);
2404
2405	      /* Assume the worst misalignment.  START is affected, but not
2406		 END, so compensate by adjusting SIZE.  Don't lose any
2407		 constant we already had.  */
2408
2409	      size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2410	      start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2411	      end += *pend;
2412	      base = *pbase;
2413	    }
2414	  break;
2415
2416	default:
2417	  break;
2418	}
2419
2420      break;
2421    }
2422
2423  if (GET_CODE (base) == CONST_INT)
2424    {
2425      start += INTVAL (base);
2426      base = const0_rtx;
2427    }
2428
2429  end = start + size;
2430
2431  /* Set the return values.  */
2432  *pbase = base;
2433  *pstart = start;
2434  *pend = end;
2435}
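
/* Illustrative sketch (hypothetical helper, not used by this pass):
   the decomposition reduces overlap testing to an interval check.
   For ADDR = (plus (symbol_ref "s") (const_int 8)) and SIZE = 4 the
   components are base (symbol_ref "s"), start 8, end 12.  */

static int
example_refs_overlap (addr1, size1, addr2, size2)
     rtx addr1, addr2;
     int size1, size2;
{
  rtx base1, base2;
  HOST_WIDE_INT start1, end1, start2, end2;

  set_nonvarying_address_components (addr1, size1, &base1, &start1, &end1);
  set_nonvarying_address_components (addr2, size2, &base2, &start2, &end2);

  /* Same base and intersecting [start, end) intervals means overlap.  */
  return (rtx_equal_p (base1, base2)
	  && start1 < end2 && start2 < end1);
}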
2436
2437/* Return 1 if X has a value that can vary even between two
2438   executions of the program.  0 means X can be compared reliably
2439   against certain constants or near-constants.  */
2440
2441static int
2442cse_rtx_varies_p (x)
2443     register rtx x;
2444{
2445  /* We need not check for X and the equivalence class being of the same
2446     mode because if X is equivalent to a constant in some mode, it
2447     doesn't vary in any mode.  */
2448
2449  if (GET_CODE (x) == REG
2450      && REGNO_QTY_VALID_P (REGNO (x))
2451      && GET_MODE (x) == qty_mode[reg_qty[REGNO (x)]]
2452      && qty_const[reg_qty[REGNO (x)]] != 0)
2453    return 0;
2454
2455  if (GET_CODE (x) == PLUS
2456      && GET_CODE (XEXP (x, 1)) == CONST_INT
2457      && GET_CODE (XEXP (x, 0)) == REG
2458      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2459      && (GET_MODE (XEXP (x, 0))
2460	  == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2461      && qty_const[reg_qty[REGNO (XEXP (x, 0))]])
2462    return 0;
2463
2464  /* This can happen as the result of virtual register instantiation, if
2465     the initial constant is too large to be a valid address.  This gives
2466     us a three instruction sequence, load large offset into a register,
2467     load fp minus a constant into a register, then a MEM which is the
2468     sum of the two `constant' registers.  */
2469  if (GET_CODE (x) == PLUS
2470      && GET_CODE (XEXP (x, 0)) == REG
2471      && GET_CODE (XEXP (x, 1)) == REG
2472      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2473      && (GET_MODE (XEXP (x, 0))
2474	  == qty_mode[reg_qty[REGNO (XEXP (x, 0))]])
2475      && qty_const[reg_qty[REGNO (XEXP (x, 0))]]
2476      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2477      && (GET_MODE (XEXP (x, 1))
2478	  == qty_mode[reg_qty[REGNO (XEXP (x, 1))]])
2479      && qty_const[reg_qty[REGNO (XEXP (x, 1))]])
2480    return 0;
2481
2482  return rtx_varies_p (x);
2483}
2484
2485/* Canonicalize an expression:
2486   replace each register reference inside it
2487   with the "oldest" equivalent register.
2488
2489   If INSN is non-zero and we are replacing a pseudo with a hard register
2490   or vice versa, validate_change is used to ensure that INSN remains valid
2491   after we make our substitution.  The calls are made with IN_GROUP non-zero
2492   so apply_change_group must be called upon the outermost return from this
2493   function (unless INSN is zero).  The result of apply_change_group can
2494   generally be discarded since the changes we are making are optional.  */
2495
2496static rtx
2497canon_reg (x, insn)
2498     rtx x;
2499     rtx insn;
2500{
2501  register int i;
2502  register enum rtx_code code;
2503  register char *fmt;
2504
2505  if (x == 0)
2506    return x;
2507
2508  code = GET_CODE (x);
2509  switch (code)
2510    {
2511    case PC:
2512    case CC0:
2513    case CONST:
2514    case CONST_INT:
2515    case CONST_DOUBLE:
2516    case SYMBOL_REF:
2517    case LABEL_REF:
2518    case ADDR_VEC:
2519    case ADDR_DIFF_VEC:
2520      return x;
2521
2522    case REG:
2523      {
2524	register int first;
2525
2526	/* Never replace a hard reg, because hard regs can appear
2527	   in more than one machine mode, and we must preserve the mode
2528	   of each occurrence.  Also, some hard regs appear in
2529	   MEMs that are shared and mustn't be altered.  Don't try to
2530	   replace any reg that maps to a reg of class NO_REGS.  */
2531	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2532	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2533	  return x;
2534
2535	first = qty_first_reg[reg_qty[REGNO (x)]];
2536	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2537		: REGNO_REG_CLASS (first) == NO_REGS ? x
2538		: gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
2539      }
2540
2541    default:
2542      break;
2543    }
2544
2545  fmt = GET_RTX_FORMAT (code);
2546  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2547    {
2548      register int j;
2549
2550      if (fmt[i] == 'e')
2551	{
2552	  rtx new = canon_reg (XEXP (x, i), insn);
2553	  int insn_code;
2554
2555	  /* If replacing pseudo with hard reg or vice versa, ensure the
2556	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2557	  if (insn != 0 && new != 0
2558	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2559	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2560		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2561		  || (insn_code = recog_memoized (insn)) < 0
2562		  || insn_n_dups[insn_code] > 0))
2563	    validate_change (insn, &XEXP (x, i), new, 1);
2564	  else
2565	    XEXP (x, i) = new;
2566	}
2567      else if (fmt[i] == 'E')
2568	for (j = 0; j < XVECLEN (x, i); j++)
2569	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2570    }
2571
2572  return x;
2573}
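
/* Worked example (illustrative only): if (reg 80) was copied from
   (reg 66) earlier in the block, the two share a quantity whose
   qty_first_reg is 66, so canon_reg rewrites occurrences of (reg 80)
   as (reg 66) and later passes see a single register.  */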
2574
2575/* LOC is a location within INSN that is an operand address (the contents of
2576   a MEM).  Find the best equivalent address to use that is valid for this
2577   insn.
2578
2579   On most CISC machines, complicated address modes are costly, and rtx_cost
2580   is a good approximation for that cost.  However, most RISC machines have
2581   only a few (usually only one) memory reference formats.  If an address is
2582   valid at all, it is often just as cheap as any other address.  Hence, for
2583   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2584   costs of various addresses.  For two addresses of equal cost, choose the one
2585   with the highest `rtx_cost' value as that has the potential of eliminating
2586   the most insns.  For equal costs, we choose the first in the equivalence
2587   class.  Note that we ignore the fact that pseudo registers are cheaper
2588   than hard registers here because we would also prefer the pseudo registers.
2589  */
2590
2591static void
2592find_best_addr (insn, loc)
2593     rtx insn;
2594     rtx *loc;
2595{
2596  struct table_elt *elt;
2597  rtx addr = *loc;
2598#ifdef ADDRESS_COST
2599  struct table_elt *p;
2600  int found_better = 1;
2601#endif
2602  int save_do_not_record = do_not_record;
2603  int save_hash_arg_in_memory = hash_arg_in_memory;
2604  int save_hash_arg_in_struct = hash_arg_in_struct;
2605  int addr_volatile;
2606  int regno;
2607  unsigned hash;
2608
2609  /* Do not try to replace constant addresses or addresses of local and
2610     argument slots.  These MEM expressions are made only once and inserted
2611     in many instructions, as well as being used to control symbol table
2612     output.  It is not safe to clobber them.
2613
2614     There are some uncommon cases where the address is already in a register
2615     for some reason, but we cannot take advantage of that because we have
2616     no easy way to unshare the MEM.  In addition, looking up all stack
2617     addresses is costly.  */
2618  if ((GET_CODE (addr) == PLUS
2619       && GET_CODE (XEXP (addr, 0)) == REG
2620       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2621       && (regno = REGNO (XEXP (addr, 0)),
2622	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2623	   || regno == ARG_POINTER_REGNUM))
2624      || (GET_CODE (addr) == REG
2625	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2626	      || regno == HARD_FRAME_POINTER_REGNUM
2627	      || regno == ARG_POINTER_REGNUM))
2628      || GET_CODE (addr) == ADDRESSOF
2629      || CONSTANT_ADDRESS_P (addr))
2630    return;
2631
2632  /* If this address is not simply a register, try to fold it.  This will
2633     sometimes simplify the expression.  Many simplifications
2634     will not be valid, but some, usually applying the associative rule, will
2635     be valid and produce better code.  */
2636  if (GET_CODE (addr) != REG)
2637    {
2638      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2639
2640      if (1
2641#ifdef ADDRESS_COST
2642	  && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2643	      || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2644		  && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2645#else
2646	  && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2647#endif
2648	  && validate_change (insn, loc, folded, 0))
2649	addr = folded;
2650    }
2651
2652  /* If this address is not in the hash table, we can't look for equivalences
2653     of the whole address.  Also, ignore if volatile.  */
2654
2655  do_not_record = 0;
2656  hash = HASH (addr, Pmode);
2657  addr_volatile = do_not_record;
2658  do_not_record = save_do_not_record;
2659  hash_arg_in_memory = save_hash_arg_in_memory;
2660  hash_arg_in_struct = save_hash_arg_in_struct;
2661
2662  if (addr_volatile)
2663    return;
2664
2665  elt = lookup (addr, hash, Pmode);
2666
2667#ifndef ADDRESS_COST
2668  if (elt)
2669    {
2670      int our_cost = elt->cost;
2671
2672      /* Find the lowest cost below ours that works.  */
2673      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2674	if (elt->cost < our_cost
2675	    && (GET_CODE (elt->exp) == REG
2676		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2677	    && validate_change (insn, loc,
2678				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2679	  return;
2680    }
2681#else
2682
2683  if (elt)
2684    {
2685      /* We need to find the best (under the criteria documented above) entry
2686	 in the class that is valid.  We use the `flag' field to indicate
2687	 choices that were invalid and iterate until we can't find a better
2688	 one that hasn't already been tried.  */
2689
2690      for (p = elt->first_same_value; p; p = p->next_same_value)
2691	p->flag = 0;
2692
2693      while (found_better)
2694	{
2695	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2696	  int best_rtx_cost = (elt->cost + 1) >> 1;
2697	  struct table_elt *best_elt = elt;
2698
2699	  found_better = 0;
2700	  for (p = elt->first_same_value; p; p = p->next_same_value)
2701	    if (! p->flag)
2702	      {
2703		if ((GET_CODE (p->exp) == REG
2704		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2705		    && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2706			|| (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2707			    && (p->cost + 1) >> 1 > best_rtx_cost)))
2708		  {
2709		    found_better = 1;
2710		    best_addr_cost = CSE_ADDRESS_COST (p->exp);
2711		    best_rtx_cost = (p->cost + 1) >> 1;
2712		    best_elt = p;
2713		  }
2714	      }
2715
2716	  if (found_better)
2717	    {
2718	      if (validate_change (insn, loc,
2719				   canon_reg (copy_rtx (best_elt->exp),
2720					      NULL_RTX), 0))
2721		return;
2722	      else
2723		best_elt->flag = 1;
2724	    }
2725	}
2726    }
2727
2728  /* If the address is a binary operation with the first operand a register
2729     and the second a constant, do the same as above, but looking for
2730     equivalences of the register.  Then try to simplify before checking for
2731     the best address to use.  This catches a few cases:  First is when we
2732     have REG+const and the register is another REG+const.  We can often merge
2733     the constants and eliminate one insn and one register.  It may also be
2734     that a machine has a cheap REG+REG+const.  Finally, this improves the
2735     code on the Alpha for unaligned byte stores.  */
2736
2737  if (flag_expensive_optimizations
2738      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2739	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2740      && GET_CODE (XEXP (*loc, 0)) == REG
2741      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2742    {
2743      rtx c = XEXP (*loc, 1);
2744
2745      do_not_record = 0;
2746      hash = HASH (XEXP (*loc, 0), Pmode);
2747      do_not_record = save_do_not_record;
2748      hash_arg_in_memory = save_hash_arg_in_memory;
2749      hash_arg_in_struct = save_hash_arg_in_struct;
2750
2751      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2752      if (elt == 0)
2753	return;
2754
2755      /* We need to find the best (under the criteria documented above) entry
2756	 in the class that is valid.  We use the `flag' field to indicate
2757	 choices that were invalid and iterate until we can't find a better
2758	 one that hasn't already been tried.  */
2759
2760      for (p = elt->first_same_value; p; p = p->next_same_value)
2761	p->flag = 0;
2762
2763      while (found_better)
2764	{
2765	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2766	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2767	  struct table_elt *best_elt = elt;
2768	  rtx best_rtx = *loc;
2769	  int count;
2770
2771	  /* In the worst case this is an O(n^2) algorithm, so limit our search
2772	     to the first 32 elements on the list.  This avoids trouble
2773	     compiling code with very long basic blocks that can easily
2774	     call cse_gen_binary so many times that we run out of memory.  */
2775
2776	  found_better = 0;
2777	  for (p = elt->first_same_value, count = 0;
2778	       p && count < 32;
2779	       p = p->next_same_value, count++)
2780	    if (! p->flag
2781		&& (GET_CODE (p->exp) == REG
2782		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
2783	      {
2784		rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
2785
2786		if ((CSE_ADDRESS_COST (new) < best_addr_cost
2787		    || (CSE_ADDRESS_COST (new) == best_addr_cost
2788			&& (COST (new) + 1) >> 1 > best_rtx_cost)))
2789		  {
2790		    found_better = 1;
2791		    best_addr_cost = CSE_ADDRESS_COST (new);
2792		    best_rtx_cost = (COST (new) + 1) >> 1;
2793		    best_elt = p;
2794		    best_rtx = new;
2795		  }
2796	      }
2797
2798	  if (found_better)
2799	    {
2800	      if (validate_change (insn, loc,
2801				   canon_reg (copy_rtx (best_rtx),
2802					      NULL_RTX), 0))
2803		return;
2804	      else
2805		best_elt->flag = 1;
2806	    }
2807	}
2808    }
2809#endif
2810}
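
/* Worked example (illustrative only): with flag_expensive_optimizations,
   an address (plus (reg 70) (const_int 8)) where (reg 70) is known
   equal to (plus (reg 66) (const_int 16)) can be recombined through
   cse_gen_binary into (plus (reg 66) (const_int 24)), merging the
   constants and often freeing (reg 70) entirely.  */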
2811
2812/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2813   operation (EQ, NE, GT, etc.), follow it back through the hash table to
2814   find what values are actually being compared.
2815
2816   *PARG1 and *PARG2 are updated to contain the rtx representing the values
2817   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
2818   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2819   compared to produce cc0.
2820
2821   The return value is the comparison operator: either CODE itself or
2822   the code corresponding to the inverse of the comparison.  */
2823
2824static enum rtx_code
2825find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2826     enum rtx_code code;
2827     rtx *parg1, *parg2;
2828     enum machine_mode *pmode1, *pmode2;
2829{
2830  rtx arg1, arg2;
2831
2832  arg1 = *parg1, arg2 = *parg2;
2833
2834  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
2835
2836  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2837    {
2838      /* Set non-zero when we find something of interest.  */
2839      rtx x = 0;
2840      int reverse_code = 0;
2841      struct table_elt *p = 0;
2842
2843      /* If arg1 is a COMPARE, extract the comparison arguments from it.
2844	 On machines with CC0, this is the only case that can occur, since
2845	 fold_rtx will return the COMPARE or item being compared with zero
2846	 when given CC0.  */
2847
2848      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2849	x = arg1;
2850
2851      /* If ARG1 is a comparison operator and CODE is testing for
2852	 STORE_FLAG_VALUE, get the inner arguments.  */
2853
2854      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2855	{
2856	  if (code == NE
2857	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2858		  && code == LT && STORE_FLAG_VALUE == -1)
2859#ifdef FLOAT_STORE_FLAG_VALUE
2860	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2861		  && FLOAT_STORE_FLAG_VALUE < 0)
2862#endif
2863	      )
2864	    x = arg1;
2865	  else if (code == EQ
2866		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2867		       && code == GE && STORE_FLAG_VALUE == -1)
2868#ifdef FLOAT_STORE_FLAG_VALUE
2869		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2870		       && FLOAT_STORE_FLAG_VALUE < 0)
2871#endif
2872		   )
2873	    x = arg1, reverse_code = 1;
2874	}
2875
2876      /* ??? We could also check for
2877
2878	 (ne (and (eq (...) (const_int 1))) (const_int 0))
2879
2880	 and related forms, but let's wait until we see them occurring.  */
2881
2882      if (x == 0)
2883	/* Look up ARG1 in the hash table and see if it has an equivalence
2884	   that lets us see what is being compared.  */
2885	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
2886		    GET_MODE (arg1));
2887      if (p) p = p->first_same_value;
2888
2889      for (; p; p = p->next_same_value)
2890	{
2891	  enum machine_mode inner_mode = GET_MODE (p->exp);
2892
2893	  /* If the entry isn't valid, skip it.  */
2894	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2895	    continue;
2896
2897	  if (GET_CODE (p->exp) == COMPARE
2898	      /* Another possibility is that this machine has a compare insn
2899		 that includes the comparison code.  In that case, ARG1 would
2900		 be equivalent to a comparison operation that would set ARG1 to
2901		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
2902		 ORIG_CODE is the actual comparison being done; if it is an EQ,
2903		 CODE is the actual comparison being done; if it is an EQ,
2904		 we must reverse CODE.  On machines with a negative value
2905	      || ((code == NE
2906		   || (code == LT
2907		       && GET_MODE_CLASS (inner_mode) == MODE_INT
2908		       && (GET_MODE_BITSIZE (inner_mode)
2909			   <= HOST_BITS_PER_WIDE_INT)
2910		       && (STORE_FLAG_VALUE
2911			   & ((HOST_WIDE_INT) 1
2912			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
2913#ifdef FLOAT_STORE_FLAG_VALUE
2914		   || (code == LT
2915		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2916		       && FLOAT_STORE_FLAG_VALUE < 0)
2917#endif
2918		   )
2919		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2920	    {
2921	      x = p->exp;
2922	      break;
2923	    }
2924	  else if ((code == EQ
2925		    || (code == GE
2926			&& GET_MODE_CLASS (inner_mode) == MODE_INT
2927			&& (GET_MODE_BITSIZE (inner_mode)
2928			    <= HOST_BITS_PER_WIDE_INT)
2929			&& (STORE_FLAG_VALUE
2930			    & ((HOST_WIDE_INT) 1
2931			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
2932#ifdef FLOAT_STORE_FLAG_VALUE
2933		    || (code == GE
2934			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2935			&& FLOAT_STORE_FLAG_VALUE < 0)
2936#endif
2937		    )
2938		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2939	    {
2940	      reverse_code = 1;
2941	      x = p->exp;
2942	      break;
2943	    }
2944
2945	  /* If this is fp + constant, the equivalent is a better operand since
2946	     it may let us predict the value of the comparison.  */
2947	  else if (NONZERO_BASE_PLUS_P (p->exp))
2948	    {
2949	      arg1 = p->exp;
2950	      continue;
2951	    }
2952	}
2953
2954      /* If we didn't find a useful equivalence for ARG1, we are done.
2955	 Otherwise, set up for the next iteration.  */
2956      if (x == 0)
2957	break;
2958
2959      arg1 = XEXP (x, 0),  arg2 = XEXP (x, 1);
2960      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
2961	code = GET_CODE (x);
2962
2963      if (reverse_code)
2964	code = reverse_condition (code);
2965    }
2966
2967  /* Return our results.  Return the modes from before fold_rtx
2968     because fold_rtx might produce const_int, and then it's too late.  */
2969  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
2970  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
2971
2972  return code;
2973}
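
/* Worked example (illustrative only): called with CODE == EQ,
   *PARG1 == (cc0) and *PARG2 == (const_int 0), where the table shows
   (cc0) equivalent to (compare (reg 66) (reg 67)), the loop steps
   through the COMPARE and returns EQ with *PARG1 == (reg 66) and
   *PARG2 == (reg 67).  */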
2974
2975/* Try to simplify a unary operation CODE whose output mode is to be
2976   MODE with input operand OP whose mode was originally OP_MODE.
2977   Return zero if no simplification can be made.  */
2978
2979rtx
2980simplify_unary_operation (code, mode, op, op_mode)
2981     enum rtx_code code;
2982     enum machine_mode mode;
2983     rtx op;
2984     enum machine_mode op_mode;
2985{
2986  register int width = GET_MODE_BITSIZE (mode);
2987
2988  /* The order of these tests is critical so that, for example, we don't
2989     check the wrong mode (input vs. output) for a conversion operation,
2990     such as FIX.  At some point, this should be simplified.  */
2991
2992#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
2993
2994  if (code == FLOAT && GET_MODE (op) == VOIDmode
2995      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
2996    {
2997      HOST_WIDE_INT hv, lv;
2998      REAL_VALUE_TYPE d;
2999
3000      if (GET_CODE (op) == CONST_INT)
3001	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3002      else
3003	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
3004
3005#ifdef REAL_ARITHMETIC
3006      REAL_VALUE_FROM_INT (d, lv, hv, mode);
3007#else
3008      if (hv < 0)
3009	{
3010	  d = (double) (~ hv);
3011	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3012		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3013	  d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3014	  d = (- d - 1.0);
3015	}
3016      else
3017	{
3018	  d = (double) hv;
3019	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3020		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3021	  d += (double) (unsigned HOST_WIDE_INT) lv;
3022	}
3023#endif  /* REAL_ARITHMETIC */
3024      d = real_value_truncate (mode, d);
3025      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3026    }
3027  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3028	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3029    {
3030      HOST_WIDE_INT hv, lv;
3031      REAL_VALUE_TYPE d;
3032
3033      if (GET_CODE (op) == CONST_INT)
3034	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3035      else
3036	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
3037
3038      if (op_mode == VOIDmode)
3039	{
3040	  /* We don't know how to interpret negative-looking numbers in
3041	     this case, so don't try to fold those.  */
3042	  if (hv < 0)
3043	    return 0;
3044	}
3045      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3046	;
3047      else
3048	hv = 0, lv &= GET_MODE_MASK (op_mode);
3049
3050#ifdef REAL_ARITHMETIC
3051      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3052#else
3053
3054      d = (double) (unsigned HOST_WIDE_INT) hv;
3055      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3056	    * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3057      d += (double) (unsigned HOST_WIDE_INT) lv;
3058#endif  /* REAL_ARITHMETIC */
3059      d = real_value_truncate (mode, d);
3060      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3061    }
3062#endif
3063
3064  if (GET_CODE (op) == CONST_INT
3065      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3066    {
3067      register HOST_WIDE_INT arg0 = INTVAL (op);
3068      register HOST_WIDE_INT val;
3069
3070      switch (code)
3071	{
3072	case NOT:
3073	  val = ~ arg0;
3074	  break;
3075
3076	case NEG:
3077	  val = - arg0;
3078	  break;
3079
3080	case ABS:
3081	  val = (arg0 >= 0 ? arg0 : - arg0);
3082	  break;
3083
3084	case FFS:
3085	  /* Don't use ffs here.  Instead, get the low-order bit and then its
3086	     number.  If arg0 is zero, this will return 0, as desired.  */
3087	  arg0 &= GET_MODE_MASK (mode);
3088	  val = exact_log2 (arg0 & (- arg0)) + 1;
3089	  break;
3090
3091	case TRUNCATE:
3092	  val = arg0;
3093	  break;
3094
3095	case ZERO_EXTEND:
3096	  if (op_mode == VOIDmode)
3097	    op_mode = mode;
3098	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3099	    {
3100	      /* If we were really extending the mode,
3101		 we would have to distinguish between zero-extension
3102		 and sign-extension.  */
3103	      if (width != GET_MODE_BITSIZE (op_mode))
3104		abort ();
3105	      val = arg0;
3106	    }
3107	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3108	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3109	  else
3110	    return 0;
3111	  break;
3112
3113	case SIGN_EXTEND:
3114	  if (op_mode == VOIDmode)
3115	    op_mode = mode;
3116	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3117	    {
3118	      /* If we were really extending the mode,
3119		 we would have to distinguish between zero-extension
3120		 and sign-extension.  */
3121	      if (width != GET_MODE_BITSIZE (op_mode))
3122		abort ();
3123	      val = arg0;
3124	    }
3125	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3126	    {
3127	      val
3128		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3129	      if (val
3130		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3131		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3132	    }
3133	  else
3134	    return 0;
3135	  break;
3136
3137	case SQRT:
3138	  return 0;
3139
3140	default:
3141	  abort ();
3142	}
3143
3144      /* Clear the bits that don't belong in our mode,
3145	 unless they and our sign bit are all one.
3146	 So we get either a reasonable negative value or a reasonable
3147	 unsigned value for this mode.  */
3148      if (width < HOST_BITS_PER_WIDE_INT
3149	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3150	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3151	val &= ((HOST_WIDE_INT) 1 << width) - 1;
3152
3153      return GEN_INT (val);
3154    }
3155
3156  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
3157     for a DImode operation on a CONST_INT.  */
3158  else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_INT * 2
3159	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3160    {
3161      HOST_WIDE_INT l1, h1, lv, hv;
3162
3163      if (GET_CODE (op) == CONST_DOUBLE)
3164	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3165      else
3166	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3167
3168      switch (code)
3169	{
3170	case NOT:
3171	  lv = ~ l1;
3172	  hv = ~ h1;
3173	  break;
3174
3175	case NEG:
3176	  neg_double (l1, h1, &lv, &hv);
3177	  break;
3178
3179	case ABS:
3180	  if (h1 < 0)
3181	    neg_double (l1, h1, &lv, &hv);
3182	  else
3183	    lv = l1, hv = h1;
3184	  break;
3185
3186	case FFS:
3187	  hv = 0;
3188	  if (l1 == 0)
3189	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3190	  else
3191	    lv = exact_log2 (l1 & (-l1)) + 1;
3192	  break;
3193
3194	case TRUNCATE:
3195	  /* This is just a change-of-mode, so do nothing.  */
3196	  lv = l1, hv = h1;
3197	  break;
3198
3199	case ZERO_EXTEND:
3200	  if (op_mode == VOIDmode
3201	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3202	    return 0;
3203
3204	  hv = 0;
3205	  lv = l1 & GET_MODE_MASK (op_mode);
3206	  break;
3207
3208	case SIGN_EXTEND:
3209	  if (op_mode == VOIDmode
3210	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3211	    return 0;
3212	  else
3213	    {
3214	      lv = l1 & GET_MODE_MASK (op_mode);
3215	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3216		  && (lv & ((HOST_WIDE_INT) 1
3217			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3218		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3219
3220	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3221	    }
3222	  break;
3223
3224	case SQRT:
3225	  return 0;
3226
3227	default:
3228	  return 0;
3229	}
3230
3231      return immed_double_const (lv, hv, mode);
3232    }
3233
3234#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3235  else if (GET_CODE (op) == CONST_DOUBLE
3236	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
3237    {
3238      REAL_VALUE_TYPE d;
3239      jmp_buf handler;
3240      rtx x;
3241
3242      if (setjmp (handler))
3243	/* There used to be a warning here, but that is inadvisable.
3244	   People may want to cause traps, and the natural way
3245	   to do it should not get a warning.  */
3246	return 0;
3247
3248      set_float_handler (handler);
3249
3250      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3251
3252      switch (code)
3253	{
3254	case NEG:
3255	  d = REAL_VALUE_NEGATE (d);
3256	  break;
3257
3258	case ABS:
3259	  if (REAL_VALUE_NEGATIVE (d))
3260	    d = REAL_VALUE_NEGATE (d);
3261	  break;
3262
3263	case FLOAT_TRUNCATE:
3264	  d = real_value_truncate (mode, d);
3265	  break;
3266
3267	case FLOAT_EXTEND:
3268	  /* All this does is change the mode.  */
3269	  break;
3270
3271	case FIX:
3272	  d = REAL_VALUE_RNDZINT (d);
3273	  break;
3274
3275	case UNSIGNED_FIX:
3276	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3277	  break;
3278
3279	case SQRT:
3280	  return 0;
3281
3282	default:
3283	  abort ();
3284	}
3285
3286      x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3287      set_float_handler (NULL_PTR);
3288      return x;
3289    }
3290
3291  else if (GET_CODE (op) == CONST_DOUBLE
3292	   && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3293	   && GET_MODE_CLASS (mode) == MODE_INT
3294	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3295    {
3296      REAL_VALUE_TYPE d;
3297      jmp_buf handler;
3298      HOST_WIDE_INT val;
3299
3300      if (setjmp (handler))
3301	return 0;
3302
3303      set_float_handler (handler);
3304
3305      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3306
3307      switch (code)
3308	{
3309	case FIX:
3310	  val = REAL_VALUE_FIX (d);
3311	  break;
3312
3313	case UNSIGNED_FIX:
3314	  val = REAL_VALUE_UNSIGNED_FIX (d);
3315	  break;
3316
3317	default:
3318	  abort ();
3319	}
3320
3321      set_float_handler (NULL_PTR);
3322
3323      /* Clear the bits that don't belong in our mode,
3324	 unless they and our sign bit are all one.
3325	 So we get either a reasonable negative value or a reasonable
3326	 unsigned value for this mode.  */
3327      if (width < HOST_BITS_PER_WIDE_INT
3328	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3329	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3330	val &= ((HOST_WIDE_INT) 1 << width) - 1;
3331
3332      /* If this would be an entire word for the target, but is not for
3333	 the host, then sign-extend on the host so that the number will look
3334	 the same way on the host that it would on the target.
3335
3336	 For example, when building a 64 bit alpha hosted 32 bit sparc
3337	 targeted compiler, then we want the 32 bit unsigned value -1 to be
3338	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3339	 The latter confuses the sparc backend.  */
3340
3341      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3342	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3343	val |= ((HOST_WIDE_INT) (-1) << width);
3344
3345      return GEN_INT (val);
3346    }
3347#endif
3348  /* This was formerly used only for non-IEEE float.
3349     eggert@twinsun.com says it is safe for IEEE also.  */
3350  else
3351    {
3352      /* There are some simplifications we can do even if the operands
3353	 aren't constant.  */
3354      switch (code)
3355	{
3356	case NEG:
3357	case NOT:
3358	  /* (not (not X)) == X, similarly for NEG.  */
3359	  if (GET_CODE (op) == code)
3360	    return XEXP (op, 0);
3361	  break;
3362
3363	case SIGN_EXTEND:
3364	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3365	     becomes just the MINUS if its mode is MODE.  This allows
3366	     folding switch statements on machines using casesi (such as
3367	     the Vax).  */
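	  /* That is, (sign_extend:SI (truncate:HI (minus:SI (label_ref L1)
	     (label_ref L2)))) folds back to the SImode MINUS itself.  */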
3368	  if (GET_CODE (op) == TRUNCATE
3369	      && GET_MODE (XEXP (op, 0)) == mode
3370	      && GET_CODE (XEXP (op, 0)) == MINUS
3371	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3372	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3373	    return XEXP (op, 0);
3374
3375#ifdef POINTERS_EXTEND_UNSIGNED
3376	  if (! POINTERS_EXTEND_UNSIGNED
3377	      && mode == Pmode && GET_MODE (op) == ptr_mode
3378	      && CONSTANT_P (op))
3379	    return convert_memory_address (Pmode, op);
3380#endif
3381	  break;
3382
3383#ifdef POINTERS_EXTEND_UNSIGNED
3384	case ZERO_EXTEND:
3385	  if (POINTERS_EXTEND_UNSIGNED
3386	      && mode == Pmode && GET_MODE (op) == ptr_mode
3387	      && CONSTANT_P (op))
3388	    return convert_memory_address (Pmode, op);
3389	  break;
3390#endif
3391
3392	default:
3393	  break;
3394	}
3395
3396      return 0;
3397    }
3398}
3399
3400/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3401   and OP1.  Return 0 if no simplification is possible.
3402
3403   Don't use this for relational operations such as EQ or LT.
3404   Use simplify_relational_operation instead.  */
3405
3406rtx
3407simplify_binary_operation (code, mode, op0, op1)
3408     enum rtx_code code;
3409     enum machine_mode mode;
3410     rtx op0, op1;
3411{
3412  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3413  HOST_WIDE_INT val;
3414  int width = GET_MODE_BITSIZE (mode);
3415  rtx tem;
3416
3417  /* Relational operations don't work here.  We must know the mode
3418     of the operands in order to do the comparison correctly.
3419     Assuming a full word can give incorrect results.
3420     Consider comparing 128 with -128 in QImode.  */
3421
3422  if (GET_RTX_CLASS (code) == '<')
3423    abort ();
3424
3425#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3426  if (GET_MODE_CLASS (mode) == MODE_FLOAT
3427      && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3428      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3429    {
3430      REAL_VALUE_TYPE f0, f1, value;
3431      jmp_buf handler;
3432
3433      if (setjmp (handler))
3434	return 0;
3435
3436      set_float_handler (handler);
3437
3438      REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3439      REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3440      f0 = real_value_truncate (mode, f0);
3441      f1 = real_value_truncate (mode, f1);
3442
3443#ifdef REAL_ARITHMETIC
3444#ifndef REAL_INFINITY
3445      if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3446	return 0;
3447#endif
3448      REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3449#else
3450      switch (code)
3451	{
3452	case PLUS:
3453	  value = f0 + f1;
3454	  break;
3455	case MINUS:
3456	  value = f0 - f1;
3457	  break;
3458	case MULT:
3459	  value = f0 * f1;
3460	  break;
3461	case DIV:
3462#ifndef REAL_INFINITY
3463	  if (f1 == 0)
3464	    return 0;
3465#endif
3466	  value = f0 / f1;
3467	  break;
3468	case SMIN:
3469	  value = MIN (f0, f1);
3470	  break;
3471	case SMAX:
3472	  value = MAX (f0, f1);
3473	  break;
3474	default:
3475	  abort ();
3476	}
3477#endif
3478
3479      value = real_value_truncate (mode, value);
3480      set_float_handler (NULL_PTR);
3481      return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3482    }
3483#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3484
3485  /* We can fold some multi-word operations.  */
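  /* For example, with HOST_BITS_PER_WIDE_INT == 32 and a 64 bit MODE,
     adding (const_double [low 1, high 0]) and (const_int -1) extends
     the CONST_INT to l2 == -1, h2 == -1, and add_double then yields
     lv == 0, hv == 0, which folds to (const_int 0).  */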
3486  if (GET_MODE_CLASS (mode) == MODE_INT
3487      && width == HOST_BITS_PER_WIDE_INT * 2
3488      && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3489      && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3490    {
3491      HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3492
3493      if (GET_CODE (op0) == CONST_DOUBLE)
3494	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3495      else
3496	l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3497
3498      if (GET_CODE (op1) == CONST_DOUBLE)
3499	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3500      else
3501	l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3502
3503      switch (code)
3504	{
3505	case MINUS:
3506	  /* A - B == A + (-B).  */
3507	  neg_double (l2, h2, &lv, &hv);
3508	  l2 = lv, h2 = hv;
3509
3510	  /* ... fall through ...  */
3511
3512	case PLUS:
3513	  add_double (l1, h1, l2, h2, &lv, &hv);
3514	  break;
3515
3516	case MULT:
3517	  mul_double (l1, h1, l2, h2, &lv, &hv);
3518	  break;
3519
3520	case DIV:  case MOD:   case UDIV:  case UMOD:
3521	  /* We'd need to include tree.h to do this and it doesn't seem worth
3522	     it.  */
3523	  return 0;
3524
3525	case AND:
3526	  lv = l1 & l2, hv = h1 & h2;
3527	  break;
3528
3529	case IOR:
3530	  lv = l1 | l2, hv = h1 | h2;
3531	  break;
3532
3533	case XOR:
3534	  lv = l1 ^ l2, hv = h1 ^ h2;
3535	  break;
3536
3537	case SMIN:
3538	  if (h1 < h2
3539	      || (h1 == h2
3540		  && ((unsigned HOST_WIDE_INT) l1
3541		      < (unsigned HOST_WIDE_INT) l2)))
3542	    lv = l1, hv = h1;
3543	  else
3544	    lv = l2, hv = h2;
3545	  break;
3546
3547	case SMAX:
3548	  if (h1 > h2
3549	      || (h1 == h2
3550		  && ((unsigned HOST_WIDE_INT) l1
3551		      > (unsigned HOST_WIDE_INT) l2)))
3552	    lv = l1, hv = h1;
3553	  else
3554	    lv = l2, hv = h2;
3555	  break;
3556
3557	case UMIN:
3558	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3559	      || (h1 == h2
3560		  && ((unsigned HOST_WIDE_INT) l1
3561		      < (unsigned HOST_WIDE_INT) l2)))
3562	    lv = l1, hv = h1;
3563	  else
3564	    lv = l2, hv = h2;
3565	  break;
3566
3567	case UMAX:
3568	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3569	      || (h1 == h2
3570		  && ((unsigned HOST_WIDE_INT) l1
3571		      > (unsigned HOST_WIDE_INT) l2)))
3572	    lv = l1, hv = h1;
3573	  else
3574	    lv = l2, hv = h2;
3575	  break;
3576
3577	case LSHIFTRT:   case ASHIFTRT:
3578	case ASHIFT:
3579	case ROTATE:     case ROTATERT:
3580#ifdef SHIFT_COUNT_TRUNCATED
3581	  if (SHIFT_COUNT_TRUNCATED)
3582	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3583#endif
3584
3585	  if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3586	    return 0;
3587
3588	  if (code == LSHIFTRT || code == ASHIFTRT)
3589	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3590			   code == ASHIFTRT);
3591	  else if (code == ASHIFT)
3592	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3593	  else if (code == ROTATE)
3594	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3595	  else /* code == ROTATERT */
3596	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3597	  break;
3598
3599	default:
3600	  return 0;
3601	}
3602
3603      return immed_double_const (lv, hv, mode);
3604    }
3605
3606  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3607      || width > HOST_BITS_PER_WIDE_INT || width == 0)
3608    {
3609      /* Even if we can't compute a constant result,
3610	 there are some cases worth simplifying.  */
3611
3612      switch (code)
3613	{
3614	case PLUS:
3615	  /* In IEEE floating point, x+0 is not the same as x.  Similarly
3616	     for the other optimizations below.  */
3617	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3618	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3619	    break;
3620
3621	  if (op1 == CONST0_RTX (mode))
3622	    return op0;
3623
3624	  /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3625	  if (GET_CODE (op0) == NEG)
3626	    return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3627	  else if (GET_CODE (op1) == NEG)
3628	    return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3629
3630	  /* Handle both-operands-constant cases.  We can only add
3631	     CONST_INTs to constants since the sum of relocatable symbols
3632	     can't be handled by most assemblers.  Don't add CONST_INT
3633	     to CONST_INT since overflow won't be computed properly if wider
3634	     than HOST_BITS_PER_WIDE_INT.  */
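	  /* For example, (plus (symbol_ref S) (const_int 4)) becomes
	     (const (plus (symbol_ref S) (const_int 4))) via plus_constant.  */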
3635
3636	  if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3637	      && GET_CODE (op1) == CONST_INT)
3638	    return plus_constant (op0, INTVAL (op1));
3639	  else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3640		   && GET_CODE (op0) == CONST_INT)
3641	    return plus_constant (op1, INTVAL (op0));
3642
3643	  /* See if this is something like X * C + X or vice versa or
3644	     if the multiplication is written as a shift.  If so, we can
3645	     distribute and make a new multiply, shift, or maybe just
3646	     have X (if the coefficients sum to 1).  But don't make a
3647	     real multiply if we didn't have one before.  */
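	  /* For example, (plus (mult X 5) X) folds to (mult X 6), while
	     (plus (ashift X 2) X) would give (mult X 5) and is rejected
	     below because HAD_MULT is zero.  */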
3648
3649	  if (! FLOAT_MODE_P (mode))
3650	    {
3651	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3652	      rtx lhs = op0, rhs = op1;
3653	      int had_mult = 0;
3654
3655	      if (GET_CODE (lhs) == NEG)
3656		coeff0 = -1, lhs = XEXP (lhs, 0);
3657	      else if (GET_CODE (lhs) == MULT
3658		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3659		{
3660		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3661		  had_mult = 1;
3662		}
3663	      else if (GET_CODE (lhs) == ASHIFT
3664		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3665		       && INTVAL (XEXP (lhs, 1)) >= 0
3666		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3667		{
3668		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3669		  lhs = XEXP (lhs, 0);
3670		}
3671
3672	      if (GET_CODE (rhs) == NEG)
3673		coeff1 = -1, rhs = XEXP (rhs, 0);
3674	      else if (GET_CODE (rhs) == MULT
3675		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3676		{
3677		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3678		  had_mult = 1;
3679		}
3680	      else if (GET_CODE (rhs) == ASHIFT
3681		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3682		       && INTVAL (XEXP (rhs, 1)) >= 0
3683		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3684		{
3685		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3686		  rhs = XEXP (rhs, 0);
3687		}
3688
3689	      if (rtx_equal_p (lhs, rhs))
3690		{
3691		  tem = cse_gen_binary (MULT, mode, lhs,
3692					GEN_INT (coeff0 + coeff1));
3693		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3694		}
3695	    }
3696
3697	  /* If one of the operands is a PLUS or a MINUS, see if we can
3698	     simplify this by the associative law.
3699	     Don't use the associative law for floating point.
3700	     The inaccuracy makes it nonassociative,
3701	     and subtle programs can break if operations are associated.  */
3702
3703	  if (INTEGRAL_MODE_P (mode)
3704	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3705		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3706	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3707	    return tem;
3708	  break;
3709
3710	case COMPARE:
3711#ifdef HAVE_cc0
3712	  /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3713	     using cc0, in which case we want to leave it as a COMPARE
3714	     so we can distinguish it from a register-register-copy.
3715
3716	     In IEEE floating point, x-0 is not the same as x.  */
3717
3718	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3719	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3720	      && op1 == CONST0_RTX (mode))
3721	    return op0;
3722#else
3723	  /* Do nothing here.  */
3724#endif
3725	  break;
3726
3727	case MINUS:
3728	  /* None of these optimizations can be done for IEEE
3729	     floating point.  */
3730	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3731	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3732	    break;
3733
3734	  /* We can't assume x-x is 0 even with non-IEEE floating point,
3735	     but since it is zero except in very strange circumstances, we
3736	     will treat it as zero with -ffast-math.  */
3737	  if (rtx_equal_p (op0, op1)
3738	      && ! side_effects_p (op0)
3739	      && (! FLOAT_MODE_P (mode) || flag_fast_math))
3740	    return CONST0_RTX (mode);
3741
3742	  /* Change subtraction from zero into negation.  */
3743	  if (op0 == CONST0_RTX (mode))
3744	    return gen_rtx_NEG (mode, op1);
3745
3746	  /* (-1 - a) is ~a.  */
3747	  if (op0 == constm1_rtx)
3748	    return gen_rtx_NOT (mode, op1);
3749
3750	  /* Subtracting 0 has no effect.  */
3751	  if (op1 == CONST0_RTX (mode))
3752	    return op0;
3753
3754	  /* See if this is something like X * C - X or vice versa or
3755	     if the multiplication is written as a shift.  If so, we can
3756	     distribute and make a new multiply, shift, or maybe just
3757	     have X (if C is 2 in the example above).  But don't make a
3758	     real multiply if we didn't have one before.  */
3759
3760	  if (! FLOAT_MODE_P (mode))
3761	    {
3762	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3763	      rtx lhs = op0, rhs = op1;
3764	      int had_mult = 0;
3765
3766	      if (GET_CODE (lhs) == NEG)
3767		coeff0 = -1, lhs = XEXP (lhs, 0);
3768	      else if (GET_CODE (lhs) == MULT
3769		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3770		{
3771		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3772		  had_mult = 1;
3773		}
3774	      else if (GET_CODE (lhs) == ASHIFT
3775		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3776		       && INTVAL (XEXP (lhs, 1)) >= 0
3777		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3778		{
3779		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3780		  lhs = XEXP (lhs, 0);
3781		}
3782
3783	      if (GET_CODE (rhs) == NEG)
3784		coeff1 = -1, rhs = XEXP (rhs, 0);
3785	      else if (GET_CODE (rhs) == MULT
3786		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3787		{
3788		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3789		  had_mult = 1;
3790		}
3791	      else if (GET_CODE (rhs) == ASHIFT
3792		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3793		       && INTVAL (XEXP (rhs, 1)) >= 0
3794		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3795		{
3796		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3797		  rhs = XEXP (rhs, 0);
3798		}
3799
3800	      if (rtx_equal_p (lhs, rhs))
3801		{
3802		  tem = cse_gen_binary (MULT, mode, lhs,
3803					GEN_INT (coeff0 - coeff1));
3804		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3805		}
3806	    }
3807
3808	  /* (a - (-b)) -> (a + b).  */
3809	  if (GET_CODE (op1) == NEG)
3810	    return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
3811
3812	  /* If one of the operands is a PLUS or a MINUS, see if we can
3813	     simplify this by the associative law.
3814	     Don't use the associative law for floating point.
3815	     The inaccuracy makes it nonassociative,
3816	     and subtle programs can break if operations are associated.  */
3817
3818	  if (INTEGRAL_MODE_P (mode)
3819	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3820		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3821	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3822	    return tem;
3823
3824	  /* Don't let a relocatable value get a negative coeff.  */
3825	  if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
3826	    return plus_constant (op0, - INTVAL (op1));
3827
3828	  /* (x - (x & y)) -> (x & ~y) */
3829	  if (GET_CODE (op1) == AND)
3830	    {
3831	      if (rtx_equal_p (op0, XEXP (op1, 0)))
3832		return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
3833	      if (rtx_equal_p (op0, XEXP (op1, 1)))
3834		return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
3835	    }
3836	  break;
3837
3838	case MULT:
3839	  if (op1 == constm1_rtx)
3840	    {
3841	      tem = simplify_unary_operation (NEG, mode, op0, mode);
3842
3843	      return tem ? tem : gen_rtx_NEG (mode, op0);
3844	    }
3845
3846	  /* In IEEE floating point, x*0 is not always 0.  */
3847	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3848	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3849	      && op1 == CONST0_RTX (mode)
3850	      && ! side_effects_p (op0))
3851	    return op1;
3852
3853	  /* In IEEE floating point, x*1 is not equivalent to x for NaNs.
3854	     However, ANSI says we can drop signals,
3855	     so we can do this anyway.  */
3856	  if (op1 == CONST1_RTX (mode))
3857	    return op0;
3858
3859	  /* Convert multiply by constant power of two into shift unless
3860	     we are still generating RTL.  This test is a kludge.  */
3861	  if (GET_CODE (op1) == CONST_INT
3862	      && (val = exact_log2 (INTVAL (op1))) >= 0
3863	      /* If the mode is larger than the host word size, and the
3864		 uppermost bit is set, then this isn't a power of two due
3865		 to implicit sign extension.  */
3866	      && (width <= HOST_BITS_PER_WIDE_INT
3867		  || val != HOST_BITS_PER_WIDE_INT - 1)
3868	      && ! rtx_equal_function_value_matters)
3869	    return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
3870
3871	  if (GET_CODE (op1) == CONST_DOUBLE
3872	      && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
3873	    {
3874	      REAL_VALUE_TYPE d;
3875	      jmp_buf handler;
3876	      int op1is2, op1ism1;
3877
3878	      if (setjmp (handler))
3879		return 0;
3880
3881	      set_float_handler (handler);
3882	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3883	      op1is2 = REAL_VALUES_EQUAL (d, dconst2);
3884	      op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
3885	      set_float_handler (NULL_PTR);
3886
3887	      /* x*2 is x+x and x*(-1) is -x */
3888	      if (op1is2 && GET_MODE (op0) == mode)
3889		return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
3890
3891	      else if (op1ism1 && GET_MODE (op0) == mode)
3892		return gen_rtx_NEG (mode, op0);
3893	    }
3894	  break;
3895
3896	case IOR:
3897	  if (op1 == const0_rtx)
3898	    return op0;
3899	  if (GET_CODE (op1) == CONST_INT
3900	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3901	    return op1;
3902	  if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
3903	    return op0;
3904	  /* A | (~A) -> -1 */
3905	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3906	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3907	      && ! side_effects_p (op0)
3908	      && GET_MODE_CLASS (mode) != MODE_CC)
3909	    return constm1_rtx;
3910	  break;
3911
3912	case XOR:
3913	  if (op1 == const0_rtx)
3914	    return op0;
3915	  if (GET_CODE (op1) == CONST_INT
3916	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3917	    return gen_rtx_NOT (mode, op0);
3918	  if (op0 == op1 && ! side_effects_p (op0)
3919	      && GET_MODE_CLASS (mode) != MODE_CC)
3920	    return const0_rtx;
3921	  break;
3922
3923	case AND:
3924	  if (op1 == const0_rtx && ! side_effects_p (op0))
3925	    return const0_rtx;
3926	  if (GET_CODE (op1) == CONST_INT
3927	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
3928	    return op0;
3929	  if (op0 == op1 && ! side_effects_p (op0)
3930	      && GET_MODE_CLASS (mode) != MODE_CC)
3931	    return op0;
3932	  /* A & (~A) -> 0 */
3933	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
3934	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
3935	      && ! side_effects_p (op0)
3936	      && GET_MODE_CLASS (mode) != MODE_CC)
3937	    return const0_rtx;
3938	  break;
3939
3940	case UDIV:
3941	  /* Convert divide by power of two into shift (divide by 1 handled
3942	     below).  */
3943	  if (GET_CODE (op1) == CONST_INT
3944	      && (arg1 = exact_log2 (INTVAL (op1))) > 0)
3945	    return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
3946
3947	  /* ... fall through ...  */
3948
3949	case DIV:
3950	  if (op1 == CONST1_RTX (mode))
3951	    return op0;
3952
3953	  /* In IEEE floating point, 0/x is not always 0.  */
3954	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3955	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3956	      && op0 == CONST0_RTX (mode)
3957	      && ! side_effects_p (op1))
3958	    return op0;
3959
3960#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3961	  /* Change division by a constant into multiplication.  Only do
3962	     this with -ffast-math until an expert says it is safe in
3963	     general.  */
3964	  else if (GET_CODE (op1) == CONST_DOUBLE
3965		   && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
3966		   && op1 != CONST0_RTX (mode)
3967		   && flag_fast_math)
3968	    {
3969	      REAL_VALUE_TYPE d;
3970	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
3971
3972	      if (! REAL_VALUES_EQUAL (d, dconst0))
3973		{
3974#if defined (REAL_ARITHMETIC)
3975		  REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
3976		  return gen_rtx_MULT (mode, op0,
3977				       CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
3978#else
3979		  return gen_rtx_MULT (mode, op0,
3980				       CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
3981#endif
3982		}
3983	    }
3984#endif
3985	  break;
3986
3987	case UMOD:
3988	  /* Handle modulus by power of two (mod with 1 handled below).  */
3989	  if (GET_CODE (op1) == CONST_INT
3990	      && exact_log2 (INTVAL (op1)) > 0)
3991	    return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
3992
3993	  /* ... fall through ...  */
3994
3995	case MOD:
3996	  if ((op0 == const0_rtx || op1 == const1_rtx)
3997	      && ! side_effects_p (op0) && ! side_effects_p (op1))
3998	    return const0_rtx;
3999	  break;
4000
4001	case ROTATERT:
4002	case ROTATE:
4003	  /* Rotating ~0 always results in ~0.  */
4004	  if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4005	      && INTVAL (op0) == GET_MODE_MASK (mode)
4006	      && ! side_effects_p (op1))
4007	    return op0;
4008
4009	  /* ... fall through ...  */
4010
4011	case ASHIFT:
4012	case ASHIFTRT:
4013	case LSHIFTRT:
4014	  if (op1 == const0_rtx)
4015	    return op0;
4016	  if (op0 == const0_rtx && ! side_effects_p (op1))
4017	    return op0;
4018	  break;
4019
4020	case SMIN:
4021	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4022	      && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4023	      && ! side_effects_p (op0))
4024	    return op1;
4025	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4026	    return op0;
4027	  break;
4028
4029	case SMAX:
4030	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4031	      && (INTVAL (op1)
4032		  == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4033	      && ! side_effects_p (op0))
4034	    return op1;
4035	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4036	    return op0;
4037	  break;
4038
4039	case UMIN:
4040	  if (op1 == const0_rtx && ! side_effects_p (op0))
4041	    return op1;
4042	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4043	    return op0;
4044	  break;
4045
4046	case UMAX:
4047	  if (op1 == constm1_rtx && ! side_effects_p (op0))
4048	    return op1;
4049	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4050	    return op0;
4051	  break;
4052
4053	default:
4054	  abort ();
4055	}
4056
4057      return 0;
4058    }
4059
4060  /* Get the integer argument values in two forms:
4061     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */
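  /* For example, with width == 8, (const_int -1) gives arg0 == 0xff
     (zero-extended) and arg0s == -1 (sign-extended).  */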
4062
4063  arg0 = INTVAL (op0);
4064  arg1 = INTVAL (op1);
4065
4066  if (width < HOST_BITS_PER_WIDE_INT)
4067    {
4068      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4069      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4070
4071      arg0s = arg0;
4072      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4073	arg0s |= ((HOST_WIDE_INT) (-1) << width);
4074
4075      arg1s = arg1;
4076      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4077	arg1s |= ((HOST_WIDE_INT) (-1) << width);
4078    }
4079  else
4080    {
4081      arg0s = arg0;
4082      arg1s = arg1;
4083    }
4084
4085  /* Compute the value of the arithmetic.  */
4086
4087  switch (code)
4088    {
4089    case PLUS:
4090      val = arg0s + arg1s;
4091      break;
4092
4093    case MINUS:
4094      val = arg0s - arg1s;
4095      break;
4096
4097    case MULT:
4098      val = arg0s * arg1s;
4099      break;
4100
4101    case DIV:
4102      if (arg1s == 0)
4103	return 0;
4104      val = arg0s / arg1s;
4105      break;
4106
4107    case MOD:
4108      if (arg1s == 0)
4109	return 0;
4110      val = arg0s % arg1s;
4111      break;
4112
4113    case UDIV:
4114      if (arg1 == 0)
4115	return 0;
4116      val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4117      break;
4118
4119    case UMOD:
4120      if (arg1 == 0)
4121	return 0;
4122      val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4123      break;
4124
4125    case AND:
4126      val = arg0 & arg1;
4127      break;
4128
4129    case IOR:
4130      val = arg0 | arg1;
4131      break;
4132
4133    case XOR:
4134      val = arg0 ^ arg1;
4135      break;
4136
4137    case LSHIFTRT:
4138      /* If shift count is undefined, don't fold it; let the machine do
4139	 what it wants.  But truncate it if the machine will do that.  */
4140      if (arg1 < 0)
4141	return 0;
4142
4143#ifdef SHIFT_COUNT_TRUNCATED
4144      if (SHIFT_COUNT_TRUNCATED)
4145	arg1 %= width;
4146#endif
4147
4148      val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4149      break;
4150
4151    case ASHIFT:
4152      if (arg1 < 0)
4153	return 0;
4154
4155#ifdef SHIFT_COUNT_TRUNCATED
4156      if (SHIFT_COUNT_TRUNCATED)
4157	arg1 %= width;
4158#endif
4159
4160      val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4161      break;
4162
4163    case ASHIFTRT:
4164      if (arg1 < 0)
4165	return 0;
4166
4167#ifdef SHIFT_COUNT_TRUNCATED
4168      if (SHIFT_COUNT_TRUNCATED)
4169	arg1 %= width;
4170#endif
4171
4172      val = arg0s >> arg1;
4173
4174      /* The bootstrap compiler may not have sign-extended the right shift.
4175	 Manually extend the sign to ensure bootstrap cc matches gcc.  */
4176      if (arg0s < 0 && arg1 > 0)
4177	val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
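      /* For example, if the host shifted arg0s == -8 right by 1
	 logically, val would come back as a huge positive number;
	 OR-ing in the high bits restores the expected -4.  */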
4178
4179      break;
4180
4181    case ROTATERT:
4182      if (arg1 < 0)
4183	return 0;
4184
4185      arg1 %= width;
4186      val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4187	     | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4188      break;
4189
4190    case ROTATE:
4191      if (arg1 < 0)
4192	return 0;
4193
4194      arg1 %= width;
4195      val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4196	     | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4197      break;
4198
4199    case COMPARE:
4200      /* Do nothing here.  */
4201      return 0;
4202
4203    case SMIN:
4204      val = arg0s <= arg1s ? arg0s : arg1s;
4205      break;
4206
4207    case UMIN:
4208      val = ((unsigned HOST_WIDE_INT) arg0
4209	     <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4210      break;
4211
4212    case SMAX:
4213      val = arg0s > arg1s ? arg0s : arg1s;
4214      break;
4215
4216    case UMAX:
4217      val = ((unsigned HOST_WIDE_INT) arg0
4218	     > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4219      break;
4220
4221    default:
4222      abort ();
4223    }
4224
4225  /* Clear the bits that don't belong in our mode, unless they and our sign
4226     bit are all one.  So we get either a reasonable negative value or a
4227     reasonable unsigned value for this mode.  */
4228  if (width < HOST_BITS_PER_WIDE_INT
4229      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4230	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4231    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4232
4233  /* If this would be an entire word for the target, but is not for
4234     the host, then sign-extend on the host so that the number will look
4235     the same way on the host that it would on the target.
4236
4237     For example, when building a 64 bit alpha hosted 32 bit sparc
4238     targeted compiler, we want the 32 bit unsigned value -1 to be
4239     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4240     The latter confuses the sparc backend.  */
4241
4242  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4243      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4244    val |= ((HOST_WIDE_INT) (-1) << width);
4245
4246  return GEN_INT (val);
4247}
4248
4249/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4250   PLUS or MINUS.
4251
4252   Rather than test for specific cases, we do this by a brute-force method
4253   and do all possible simplifications until no more changes occur.  Then
4254   we rebuild the operation.  */
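/* For example, given CODE == PLUS, OP0 == (minus A (minus B C)), and
   OP1 == B, the expansion phase collects A (+), B (-), C (+), and
   B (+); the B (-) / B (+) pair then folds to zero, and the rebuild
   returns (plus A C).  */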
4255
4256static rtx
4257simplify_plus_minus (code, mode, op0, op1)
4258     enum rtx_code code;
4259     enum machine_mode mode;
4260     rtx op0, op1;
4261{
4262  rtx ops[8];
4263  int negs[8];
4264  rtx result, tem;
4265  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4266  int first = 1, negate = 0, changed;
4267  int i, j;
4268
4269  bzero ((char *) ops, sizeof ops);
4270
4271  /* Set up the two operands and then expand them until nothing has been
4272     changed.  If we run out of room in our array, give up; this should
4273     almost never happen.  */
4274
4275  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4276
4277  changed = 1;
4278  while (changed)
4279    {
4280      changed = 0;
4281
4282      for (i = 0; i < n_ops; i++)
4283	switch (GET_CODE (ops[i]))
4284	  {
4285	  case PLUS:
4286	  case MINUS:
4287	    if (n_ops == 7)
4288	      return 0;
4289
4290	    ops[n_ops] = XEXP (ops[i], 1);
4291	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4292	    ops[i] = XEXP (ops[i], 0);
4293	    input_ops++;
4294	    changed = 1;
4295	    break;
4296
4297	  case NEG:
4298	    ops[i] = XEXP (ops[i], 0);
4299	    negs[i] = ! negs[i];
4300	    changed = 1;
4301	    break;
4302
4303	  case CONST:
4304	    ops[i] = XEXP (ops[i], 0);
4305	    input_consts++;
4306	    changed = 1;
4307	    break;
4308
4309	  case NOT:
4310	    /* ~a -> (-a - 1) */
4311	    if (n_ops != 7)
4312	      {
4313		ops[n_ops] = constm1_rtx;
4314		negs[n_ops++] = negs[i];
4315		ops[i] = XEXP (ops[i], 0);
4316		negs[i] = ! negs[i];
4317		changed = 1;
4318	      }
4319	    break;
4320
4321	  case CONST_INT:
4322	    if (negs[i])
4323	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4324	    break;
4325
4326	  default:
4327	    break;
4328	  }
4329    }
4330
4331  /* If we only have two operands, we can't do anything.  */
4332  if (n_ops <= 2)
4333    return 0;
4334
4335  /* Now simplify each pair of operands until nothing changes.  The first
4336     time through just simplify constants against each other.  */
4337
4338  changed = 1;
4339  while (changed)
4340    {
4341      changed = first;
4342
4343      for (i = 0; i < n_ops - 1; i++)
4344	for (j = i + 1; j < n_ops; j++)
4345	  if (ops[i] != 0 && ops[j] != 0
4346	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4347	    {
4348	      rtx lhs = ops[i], rhs = ops[j];
4349	      enum rtx_code ncode = PLUS;
4350
4351	      if (negs[i] && ! negs[j])
4352		lhs = ops[j], rhs = ops[i], ncode = MINUS;
4353	      else if (! negs[i] && negs[j])
4354		ncode = MINUS;
4355
4356	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4357	      if (tem)
4358		{
4359		  ops[i] = tem, ops[j] = 0;
4360		  negs[i] = negs[i] && negs[j];
4361		  if (GET_CODE (tem) == NEG)
4362		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4363
4364		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4365		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4366		  changed = 1;
4367		}
4368	    }
4369
4370      first = 0;
4371    }
4372
4373  /* Pack all the operands to the lower-numbered entries and give up if
4374     we didn't reduce the number of operands we had.  Make sure we
4375     count a CONST as two operands.  If we have the same number of
4376     operands, but have made more CONSTs than we had, this is also
4377     an improvement, so accept it.  */
4378
4379  for (i = 0, j = 0; j < n_ops; j++)
4380    if (ops[j] != 0)
4381      {
4382	ops[i] = ops[j], negs[i++] = negs[j];
4383	if (GET_CODE (ops[j]) == CONST)
4384	  n_consts++;
4385      }
4386
4387  if (i + n_consts > input_ops
4388      || (i + n_consts == input_ops && n_consts <= input_consts))
4389    return 0;
4390
4391  n_ops = i;
4392
4393  /* If we have a CONST_INT, put it last.  */
4394  for (i = 0; i < n_ops - 1; i++)
4395    if (GET_CODE (ops[i]) == CONST_INT)
4396      {
4397	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i], ops[i] = tem;
4398	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4399      }
4400
4401  /* Put a non-negated operand first.  If there aren't any, make all
4402     operands positive and negate the whole thing later.  */
4403  for (i = 0; i < n_ops && negs[i]; i++)
4404    ;
4405
4406  if (i == n_ops)
4407    {
4408      for (i = 0; i < n_ops; i++)
4409	negs[i] = 0;
4410      negate = 1;
4411    }
4412  else if (i != 0)
4413    {
4414      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4415      j = negs[0], negs[0] = negs[i], negs[i] = j;
4416    }
4417
4418  /* Now make the result by performing the requested operations.  */
4419  result = ops[0];
4420  for (i = 1; i < n_ops; i++)
4421    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4422
4423  return negate ? gen_rtx_NEG (mode, result) : result;
4424}
4425
4426/* Make a binary operation by properly ordering the operands and
4427   seeing if the expression folds.  */
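/* For example, a request for (plus (const_int 3) (reg X)) is first
   reordered to put the register first and is then handled by
   plus_constant, yielding (plus (reg X) (const_int 3)).  */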
4428
4429static rtx
4430cse_gen_binary (code, mode, op0, op1)
4431     enum rtx_code code;
4432     enum machine_mode mode;
4433     rtx op0, op1;
4434{
4435  rtx tem;
4436
4437  /* Put complex operands first and constants second if commutative.  */
4438  if (GET_RTX_CLASS (code) == 'c'
4439      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4440	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4441	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4442	  || (GET_CODE (op0) == SUBREG
4443	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4444	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4445    tem = op0, op0 = op1, op1 = tem;
4446
4447  /* If this simplifies, do it.  */
4448  tem = simplify_binary_operation (code, mode, op0, op1);
4449
4450  if (tem)
4451    return tem;
4452
4453  /* Handle addition and subtraction of CONST_INT specially.  Otherwise,
4454     just form the operation.  */
4455
4456  if (code == PLUS && GET_CODE (op1) == CONST_INT
4457      && GET_MODE (op0) != VOIDmode)
4458    return plus_constant (op0, INTVAL (op1));
4459  else if (code == MINUS && GET_CODE (op1) == CONST_INT
4460	   && GET_MODE (op0) != VOIDmode)
4461    return plus_constant (op0, - INTVAL (op1));
4462  else
4463    return gen_rtx_fmt_ee (code, mode, op0, op1);
4464}
4465
4466/* Like simplify_binary_operation except used for relational operators.
4467   MODE is the mode of the operands, not that of the result.  If MODE
4468   is VOIDmode, both operands must also be VOIDmode and we compare the
4469   operands in "infinite precision".
4470
4471   If no simplification is possible, this function returns zero.  Otherwise,
4472   it returns either const_true_rtx or const0_rtx.  */
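/* For example, simplify_relational_operation (LT, VOIDmode,
   (const_int -1), (const_int 0)) compares the operands in infinite
   precision and returns const_true_rtx.  */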
4473
4474rtx
4475simplify_relational_operation (code, mode, op0, op1)
4476     enum rtx_code code;
4477     enum machine_mode mode;
4478     rtx op0, op1;
4479{
4480  int equal, op0lt, op0ltu, op1lt, op1ltu;
4481  rtx tem;
4482
4483  /* If op0 is a compare, extract the comparison arguments from it.  */
4484  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4485    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4486
4487  /* We can't simplify MODE_CC values since we don't know what the
4488     actual comparison is.  */
4489  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4490#ifdef HAVE_cc0
4491      || op0 == cc0_rtx
4492#endif
4493      )
4494    return 0;
4495
4496  /* For integer comparisons of A and B maybe we can simplify A - B and can
4497     then simplify a comparison of that with zero.  If A and B are both either
4498     a register or a CONST_INT, this can't help; testing for these cases will
4499     prevent infinite recursion here and speed things up.
4500
4501     If CODE is an unsigned comparison, then we can never do this optimization,
4502     because it gives an incorrect result if the subtraction wraps around zero.
4503     ANSI C defines unsigned operations such that they never overflow, and
4504     thus such cases cannot be ignored.  */
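  /* For example, in SImode (leu 0x80000000 0) is false, but
     0x80000000 - 0 is negative as a signed value, so the converted
     test (le (minus ...) (const_int 0)) would wrongly yield true.  */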
4505
4506  if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4507      && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4508	    && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4509      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4510      && code != GTU && code != GEU && code != LTU && code != LEU)
4511    return simplify_relational_operation (signed_condition (code),
4512					  mode, tem, const0_rtx);
4513
4514  /* For non-IEEE floating-point, if the two operands are equal, we know the
4515     result.  */
4516  if (rtx_equal_p (op0, op1)
4517      && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4518	  || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4519    equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4520
4521  /* If the operands are floating-point constants, see if we can fold
4522     the result.  */
4523#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4524  else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4525	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4526    {
4527      REAL_VALUE_TYPE d0, d1;
4528      jmp_buf handler;
4529
4530      if (setjmp (handler))
4531	return 0;
4532
4533      set_float_handler (handler);
4534      REAL_VALUE_FROM_CONST_DOUBLE (d0, op0);
4535      REAL_VALUE_FROM_CONST_DOUBLE (d1, op1);
4536      equal = REAL_VALUES_EQUAL (d0, d1);
4537      op0lt = op0ltu = REAL_VALUES_LESS (d0, d1);
4538      op1lt = op1ltu = REAL_VALUES_LESS (d1, d0);
4539      set_float_handler (NULL_PTR);
4540    }
4541#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4542
4543  /* Otherwise, see if the operands are both integers.  */
4544  else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4545	   && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4546	   && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4547    {
4548      int width = GET_MODE_BITSIZE (mode);
4549      HOST_WIDE_INT l0s, h0s, l1s, h1s;
4550      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4551
4552      /* Get the two words comprising each integer constant.  */
4553      if (GET_CODE (op0) == CONST_DOUBLE)
4554	{
4555	  l0u = l0s = CONST_DOUBLE_LOW (op0);
4556	  h0u = h0s = CONST_DOUBLE_HIGH (op0);
4557	}
4558      else
4559	{
4560	  l0u = l0s = INTVAL (op0);
4561	  h0u = h0s = l0s < 0 ? -1 : 0;
4562	}
4563
4564      if (GET_CODE (op1) == CONST_DOUBLE)
4565	{
4566	  l1u = l1s = CONST_DOUBLE_LOW (op1);
4567	  h1u = h1s = CONST_DOUBLE_HIGH (op1);
4568	}
4569      else
4570	{
4571	  l1u = l1s = INTVAL (op1);
4572	  h1u = h1s = l1s < 0 ? -1 : 0;
4573	}
4574
4575      /* If WIDTH is nonzero and no wider than HOST_BITS_PER_WIDE_INT,
4576	 we have to sign or zero-extend the values.  Extend the low
4577	 words first, so the high words are computed from the corrected
4578	 low words.  */
4579      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4580	{
4581	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4582	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4583
4584	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4585	    l0s |= ((HOST_WIDE_INT) (-1) << width);
4586	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4587	    l1s |= ((HOST_WIDE_INT) (-1) << width);
4588	}
4589      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4590	h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4591
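      /* For example, with width == 8, (const_int 255) and (const_int -1)
	 both end up with l0u == l1u == 0xff and l0s == l1s == -1, so the
	 comparisons below treat them as equal, as QImode requires.  */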
4592      equal = (h0u == h1u && l0u == l1u);
4593      op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4594      op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4595      op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4596      op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4597    }
4598
4599  /* Otherwise, there are some code-specific tests we can make.  */
4600  else
4601    {
4602      switch (code)
4603	{
4604	case EQ:
4605	  /* References to the frame plus a constant or labels cannot
4606	     be zero, but a SYMBOL_REF can due to #pragma weak.  */
4607	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4608	       || GET_CODE (op0) == LABEL_REF)
4609#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4610	      /* On some machines, the ap reg can be 0 sometimes.  */
4611	      && op0 != arg_pointer_rtx
4612#endif
4613		)
4614	    return const0_rtx;
4615	  break;
4616
4617	case NE:
4618	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4619	       || GET_CODE (op0) == LABEL_REF)
4620#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4621	      && op0 != arg_pointer_rtx
4622#endif
4623	      )
4624	    return const_true_rtx;
4625	  break;
4626
4627	case GEU:
4628	  /* Unsigned values are never negative.  */
4629	  if (op1 == const0_rtx)
4630	    return const_true_rtx;
4631	  break;
4632
4633	case LTU:
4634	  if (op1 == const0_rtx)
4635	    return const0_rtx;
4636	  break;
4637
4638	case LEU:
4639	  /* Unsigned values are never greater than the largest
4640	     unsigned value.  */
4641	  if (GET_CODE (op1) == CONST_INT
4642	      && INTVAL (op1) == GET_MODE_MASK (mode)
4643	      && INTEGRAL_MODE_P (mode))
4644	    return const_true_rtx;
4645	  break;
4646
4647	case GTU:
4648	  if (GET_CODE (op1) == CONST_INT
4649	      && INTVAL (op1) == GET_MODE_MASK (mode)
4650	      && INTEGRAL_MODE_P (mode))
4651	    return const0_rtx;
4652	  break;
4653
4654	default:
4655	  break;
4656	}
4657
4658      return 0;
4659    }
4660
4661  /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4662     as appropriate.  */
4663  switch (code)
4664    {
4665    case EQ:
4666      return equal ? const_true_rtx : const0_rtx;
4667    case NE:
4668      return ! equal ? const_true_rtx : const0_rtx;
4669    case LT:
4670      return op0lt ? const_true_rtx : const0_rtx;
4671    case GT:
4672      return op1lt ? const_true_rtx : const0_rtx;
4673    case LTU:
4674      return op0ltu ? const_true_rtx : const0_rtx;
4675    case GTU:
4676      return op1ltu ? const_true_rtx : const0_rtx;
4677    case LE:
4678      return equal || op0lt ? const_true_rtx : const0_rtx;
4679    case GE:
4680      return equal || op1lt ? const_true_rtx : const0_rtx;
4681    case LEU:
4682      return equal || op0ltu ? const_true_rtx : const0_rtx;
4683    case GEU:
4684      return equal || op1ltu ? const_true_rtx : const0_rtx;
4685    default:
4686      abort ();
4687    }
4688}
4689
4690/* Simplify CODE, an operation with result mode MODE and three operands,
4691   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
4692   a constant.  Return 0 if no simplification is possible.  */
4693
4694rtx
4695simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4696     enum rtx_code code;
4697     enum machine_mode mode, op0_mode;
4698     rtx op0, op1, op2;
4699{
4700  int width = GET_MODE_BITSIZE (mode);
4701
4702  /* VOIDmode means "infinite" precision.  */
4703  if (width == 0)
4704    width = HOST_BITS_PER_WIDE_INT;
4705
4706  switch (code)
4707    {
4708    case SIGN_EXTRACT:
4709    case ZERO_EXTRACT:
4710      if (GET_CODE (op0) == CONST_INT
4711	  && GET_CODE (op1) == CONST_INT
4712	  && GET_CODE (op2) == CONST_INT
4713	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4714	  && width <= HOST_BITS_PER_WIDE_INT)
4715	{
4716	  /* Extracting a bit-field from a constant.  */
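	  /* For example, (sign_extract:SI (const_int 0xab) (const_int 4)
	     (const_int 0)) with BITS_BIG_ENDIAN == 0 shifts by 0, masks
	     down to 0xb, sees the field's sign bit set, and returns
	     (const_int -5).  */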
4717	  HOST_WIDE_INT val = INTVAL (op0);
4718
4719	  if (BITS_BIG_ENDIAN)
4720	    val >>= (GET_MODE_BITSIZE (op0_mode)
4721		     - INTVAL (op2) - INTVAL (op1));
4722	  else
4723	    val >>= INTVAL (op2);
4724
4725	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4726	    {
4727	      /* First zero-extend.  */
4728	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4729	      /* If desired, propagate sign bit.  */
4730	      if (code == SIGN_EXTRACT
4731		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4732		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4733	    }
4734
4735	  /* Clear the bits that don't belong in our mode,
4736	     unless they and our sign bit are all one.
4737	     So we get either a reasonable negative value or a reasonable
4738	     unsigned value for this mode.  */
4739	  if (width < HOST_BITS_PER_WIDE_INT
4740	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4741		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4742	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4743
4744	  return GEN_INT (val);
4745	}
4746      break;
4747
4748    case IF_THEN_ELSE:
4749      if (GET_CODE (op0) == CONST_INT)
4750	return op0 != const0_rtx ? op1 : op2;
4751
4752      /* Convert a != b ? a : b and a == b ? b : a to "a".  */
4753      if (GET_CODE (op0) == NE && ! side_effects_p (op0)
4754	  && rtx_equal_p (XEXP (op0, 0), op1)
4755	  && rtx_equal_p (XEXP (op0, 1), op2))
4756	return op1;
4757      else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
4758	  && rtx_equal_p (XEXP (op0, 1), op1)
4759	  && rtx_equal_p (XEXP (op0, 0), op2))
4760	return op2;
4761      else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
4762	{
4763	  rtx temp;
4764	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
4765						XEXP (op0, 0), XEXP (op0, 1));
4766	  /* See if any simplifications were possible.  */
4767	  if (temp == const0_rtx)
4768	    return op2;
4769	  else if (temp == const1_rtx)
4770	    return op1;
4771	}
4772      break;
4773
4774    default:
4775      abort ();
4776    }
4777
4778  return 0;
4779}
4780
4781/* If X is a nontrivial arithmetic operation on an argument
4782   for which a constant value can be determined, return
4783   the result of operating on that value, as a constant.
4784   Otherwise, return X, possibly with one or more operands
4785   modified by recursive calls to this function.
4786
4787   If X is a register whose contents are known, we do NOT
4788   return those contents here.  equiv_constant is called to
4789   perform that task.
4790
4791   INSN is the insn that we may be modifying.  If it is 0, make a copy
4792   of X before modifying it.  */
4793
4794static rtx
4795fold_rtx (x, insn)
4796     rtx x;
4797     rtx insn;
4798{
4799  register enum rtx_code code;
4800  register enum machine_mode mode;
4801  register char *fmt;
4802  register int i;
4803  rtx new = 0;
4804  int copied = 0;
4805  int must_swap = 0;
4806
4807  /* Folded equivalents of first two operands of X.  */
4808  rtx folded_arg0;
4809  rtx folded_arg1;
4810
4811  /* Constant equivalents of first three operands of X;
4812     0 when no such equivalent is known.  */
4813  rtx const_arg0;
4814  rtx const_arg1;
4815  rtx const_arg2;
4816
4817  /* The mode of the first operand of X.  We need this for sign and zero
4818     extends.  */
4819  enum machine_mode mode_arg0;
4820
4821  if (x == 0)
4822    return x;
4823
4824  mode = GET_MODE (x);
4825  code = GET_CODE (x);
4826  switch (code)
4827    {
4828    case CONST:
4829    case CONST_INT:
4830    case CONST_DOUBLE:
4831    case SYMBOL_REF:
4832    case LABEL_REF:
4833    case REG:
4834      /* No use simplifying an EXPR_LIST
4835	 since it is used only for lists of args
4836	 in a function call's REG_EQUAL note.  */
4837    case EXPR_LIST:
4838      /* Changing anything inside an ADDRESSOF is incorrect; we don't
4839	 want to, e.g., make (addressof (const_int 0)) just because
4840	 the location is known to be zero.  */
4841    case ADDRESSOF:
4842      return x;
4843
4844#ifdef HAVE_cc0
4845    case CC0:
4846      return prev_insn_cc0;
4847#endif
4848
4849    case PC:
4850      /* If the next insn is a CODE_LABEL followed by a jump table,
4851	 PC's value is a LABEL_REF pointing to that label.  That
4852	 lets us fold switch statements on the Vax.  */
4853      if (insn && GET_CODE (insn) == JUMP_INSN)
4854	{
4855	  rtx next = next_nonnote_insn (insn);
4856
4857	  if (next && GET_CODE (next) == CODE_LABEL
4858	      && NEXT_INSN (next) != 0
4859	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
4860	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
4861		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
4862	    return gen_rtx_LABEL_REF (Pmode, next);
4863	}
4864      break;
4865
4866    case SUBREG:
4867      /* See if we previously assigned a constant value to this SUBREG.  */
4868      if ((new = lookup_as_function (x, CONST_INT)) != 0
4869	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
4870	return new;
4871
4872      /* If this is a paradoxical SUBREG, we have no idea what value the
4873	 extra bits would have.  However, if the operand is equivalent
4874	 to a SUBREG whose operand has the same mode as ours, and all the
4875	 modes are within a word, we can just use the inner operand
4876	 because these SUBREGs just say how to treat the register.
4877
4878	 Similarly if we find an integer constant.  */
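      /* For example (assuming a 64 bit word target), if X is
	 (subreg:DI (reg:SI 100) 0) and reg 100 is known to be
	 equivalent to (subreg:SI (reg:DI 101) 0), we can return
	 (reg:DI 101) instead.  */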
4879
4880      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4881	{
4882	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
4883	  struct table_elt *elt;
4884
4885	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
4886	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
4887	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
4888				imode)) != 0)
4889	    for (elt = elt->first_same_value;
4890		 elt; elt = elt->next_same_value)
4891	      {
4892		if (CONSTANT_P (elt->exp)
4893		    && GET_MODE (elt->exp) == VOIDmode)
4894		  return elt->exp;
4895
4896		if (GET_CODE (elt->exp) == SUBREG
4897		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
4898		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
4899		  return copy_rtx (SUBREG_REG (elt->exp));
4900	      }
4901
4902	  return x;
4903	}
4904
4905      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
4906	 We might be able to if the SUBREG is extracting a single word in an
4907	 integral mode or extracting the low part.  */
4908
4909      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
4910      const_arg0 = equiv_constant (folded_arg0);
4911      if (const_arg0)
4912	folded_arg0 = const_arg0;
4913
4914      if (folded_arg0 != SUBREG_REG (x))
4915	{
4916	  new = 0;
4917
4918	  if (GET_MODE_CLASS (mode) == MODE_INT
4919	      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
4920	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
4921	    new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
4922				   GET_MODE (SUBREG_REG (x)));
4923	  if (new == 0 && subreg_lowpart_p (x))
4924	    new = gen_lowpart_if_possible (mode, folded_arg0);
4925	  if (new)
4926	    return new;
4927	}
4928
4929      /* If this is a narrowing SUBREG and our operand is a REG, see if
4930	 we can find an equivalence for REG that is an arithmetic operation
4931	 in a wider mode where both operands are paradoxical SUBREGs
4932	 from objects of our result mode.  In that case, we couldn't report
4933	 an equivalent value for that operation, since we don't know what the
4934	 extra bits will be.  But we can find an equivalence for this SUBREG
4935	 by folding that operation in the narrow mode.  This allows us to
4936	 fold arithmetic in narrow modes when the machine only supports
4937	 word-sized arithmetic.
4938
4939	 Also look for a case where we have a SUBREG whose operand is the
4940	 same as our result.  If both modes are smaller than a word, we
4941	 are simply interpreting a register in different modes and we
4942	 can use the inner value.  */
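      /* For example, if reg 100 is known to be equivalent to
	 (plus:SI (subreg:SI (reg:QI 101) 0) (const_int 3)) and reg 101
	 is known to hold (const_int 4), then (subreg:QI (reg:SI 100) 0)
	 folds to (const_int 7), computed directly in QImode.  */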
4943
4944      if (GET_CODE (folded_arg0) == REG
4945	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
4946	  && subreg_lowpart_p (x))
4947	{
4948	  struct table_elt *elt;
4949
4950	  /* We can use HASH here since we know that canon_hash won't be
4951	     called.  */
4952	  elt = lookup (folded_arg0,
4953			HASH (folded_arg0, GET_MODE (folded_arg0)),
4954			GET_MODE (folded_arg0));
4955
4956	  if (elt)
4957	    elt = elt->first_same_value;
4958
4959	  for (; elt; elt = elt->next_same_value)
4960	    {
4961	      enum rtx_code eltcode = GET_CODE (elt->exp);
4962
4963	      /* Just check for unary and binary operations.  */
4964	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
4965		  && GET_CODE (elt->exp) != SIGN_EXTEND
4966		  && GET_CODE (elt->exp) != ZERO_EXTEND
4967		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4968		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
4969		{
4970		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
4971
4972		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4973		    op0 = fold_rtx (op0, NULL_RTX);
4974
4975		  op0 = equiv_constant (op0);
4976		  if (op0)
4977		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
4978						    op0, mode);
4979		}
4980	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
4981			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
4982		       && eltcode != DIV && eltcode != MOD
4983		       && eltcode != UDIV && eltcode != UMOD
4984		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
4985		       && eltcode != ROTATE && eltcode != ROTATERT
4986		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
4987			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
4988				== mode))
4989			   || CONSTANT_P (XEXP (elt->exp, 0)))
4990		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
4991			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
4992				== mode))
4993			   || CONSTANT_P (XEXP (elt->exp, 1))))
4994		{
4995		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
4996		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
4997
4998		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
4999		    op0 = fold_rtx (op0, NULL_RTX);
5000
5001		  if (op0)
5002		    op0 = equiv_constant (op0);
5003
5004		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5005		    op1 = fold_rtx (op1, NULL_RTX);
5006
5007		  if (op1)
5008		    op1 = equiv_constant (op1);
5009
5010		  /* If we are looking for the low SImode part of
5011		     (ashift:DI c (const_int 32)), it doesn't work
5012		     to compute that in SImode, because a 32-bit shift
5013		     in SImode is unpredictable.  We know the value is 0.  */
5014		  if (op0 && op1
5015		      && GET_CODE (elt->exp) == ASHIFT
5016		      && GET_CODE (op1) == CONST_INT
5017		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5018		    {
5019		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5020
5021			/* If the count fits in the inner mode's width,
5022			   but exceeds the outer mode's width,
5023			   the value will get truncated to 0
5024			   by the subreg.  */
5025			new = const0_rtx;
5026		      else
5027			/* If the count exceeds even the inner mode's width,
5028			   don't fold this expression.  */
5029			new = 0;
5030		    }
5031		  else if (op0 && op1)
5032		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5033						     op0, op1);
5034		}
5035
5036	      else if (GET_CODE (elt->exp) == SUBREG
5037		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
5038		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5039			   <= UNITS_PER_WORD)
5040		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5041		new = copy_rtx (SUBREG_REG (elt->exp));
5042
5043	      if (new)
5044		return new;
5045	    }
5046	}
5047
5048      return x;
5049
5050    case NOT:
5051    case NEG:
5052      /* If we have (NOT Y), see if Y is known to be (NOT Z).
5053	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
5054      new = lookup_as_function (XEXP (x, 0), code);
5055      if (new)
5056	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5057      break;
5058
5059    case MEM:
5060      /* If we are not actually processing an insn, don't try to find the
5061	 best address.  Not only don't we care, but we could modify the
5062	 MEM in an invalid way since we have no insn to validate against.  */
5063      if (insn != 0)
5064	find_best_addr (insn, &XEXP (x, 0));
5065
5066      {
5067	/* Even if we don't fold in the insn itself,
5068	   we can safely do so here, in hopes of getting a constant.  */
5069	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5070	rtx base = 0;
5071	HOST_WIDE_INT offset = 0;
5072
5073	if (GET_CODE (addr) == REG
5074	    && REGNO_QTY_VALID_P (REGNO (addr))
5075	    && GET_MODE (addr) == qty_mode[reg_qty[REGNO (addr)]]
5076	    && qty_const[reg_qty[REGNO (addr)]] != 0)
5077	  addr = qty_const[reg_qty[REGNO (addr)]];
5078
5079	/* If address is constant, split it into a base and integer offset.  */
5080	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5081	  base = addr;
5082	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5083		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5084	  {
5085	    base = XEXP (XEXP (addr, 0), 0);
5086	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5087	  }
5088	else if (GET_CODE (addr) == LO_SUM
5089		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5090	  base = XEXP (addr, 1);
5091	else if (GET_CODE (addr) == ADDRESSOF)
5092	  return change_address (x, VOIDmode, addr);
5093
5094	/* If this is a constant pool reference, we can fold it into its
5095	   constant to allow better value tracking.  */
5096	if (base && GET_CODE (base) == SYMBOL_REF
5097	    && CONSTANT_POOL_ADDRESS_P (base))
5098	  {
5099	    rtx constant = get_pool_constant (base);
5100	    enum machine_mode const_mode = get_pool_mode (base);
5101	    rtx new;
5102
5103	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5104	      constant_pool_entries_cost = COST (constant);
5105
5106	    /* If we are loading the full constant, we have an equivalence.  */
5107	    if (offset == 0 && mode == const_mode)
5108	      return constant;
5109
5110	    /* If this actually isn't a constant (weird!), we can't do
5111	       anything.  Otherwise, handle the two most common cases:
5112	       extracting a word from a multi-word constant, and extracting
5113	       the low-order bits.  Other cases don't seem common enough to
5114	       worry about.  */
5115	    if (! CONSTANT_P (constant))
5116	      return x;
5117
5118	    if (GET_MODE_CLASS (mode) == MODE_INT
5119		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
5120		&& offset % UNITS_PER_WORD == 0
5121		&& (new = operand_subword (constant,
5122					   offset / UNITS_PER_WORD,
5123					   0, const_mode)) != 0)
5124	      return new;
5125
5126	    if (((BYTES_BIG_ENDIAN
5127		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5128		 || (! BYTES_BIG_ENDIAN && offset == 0))
5129		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
5130	      return new;
5131	  }
5132
5133	/* If this is a reference to a label at a known position in a jump
5134	   table, we also know its value.  */
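	/* E.g., with an ADDR_VEC of SImode entries, a reference at
	   OFFSET 4 from the LABEL_REF is element 1 of the vector
	   (the offset divided by the entry size).  */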
5135	if (base && GET_CODE (base) == LABEL_REF)
5136	  {
5137	    rtx label = XEXP (base, 0);
5138	    rtx table_insn = NEXT_INSN (label);
5139
5140	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5141		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5142	      {
5143		rtx table = PATTERN (table_insn);
5144
5145		if (offset >= 0
5146		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5147			< XVECLEN (table, 0)))
5148		  return XVECEXP (table, 0,
5149				  offset / GET_MODE_SIZE (GET_MODE (table)));
5150	      }
5151	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5152		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5153	      {
5154		rtx table = PATTERN (table_insn);
5155
5156		if (offset >= 0
5157		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5158			< XVECLEN (table, 1)))
5159		  {
5160		    offset /= GET_MODE_SIZE (GET_MODE (table));
5161		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5162					 XEXP (table, 0));
5163
5164		    if (GET_MODE (table) != Pmode)
5165		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5166
5167		    /* Indicate this is a constant.  This isn't a
5168		       valid form of CONST, but it will only be used
5169		       to fold the next insns and then discarded, so
5170		       it should be safe.
5171
5172		       Note this expression must be explicitly discarded,
5173		       by cse_insn, else it may end up in a REG_EQUAL note
5174		       and "escape" to cause problems elsewhere.  */
5175		    return gen_rtx_CONST (GET_MODE (new), new);
5176		  }
5177	      }
5178	  }
5179
5180	return x;
5181      }
5182
5183    case ASM_OPERANDS:
5184      for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5185	validate_change (insn, &XVECEXP (x, 3, i),
5186			 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5187      break;
5188
5189    default:
5190      break;
5191    }
5192
5193  const_arg0 = 0;
5194  const_arg1 = 0;
5195  const_arg2 = 0;
5196  mode_arg0 = VOIDmode;
5197
5198  /* Try folding our operands.
5199     Then see which ones have constant values known.  */
5200
5201  fmt = GET_RTX_FORMAT (code);
5202  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5203    if (fmt[i] == 'e')
5204      {
5205	rtx arg = XEXP (x, i);
5206	rtx folded_arg = arg, const_arg = 0;
5207	enum machine_mode mode_arg = GET_MODE (arg);
5208	rtx cheap_arg, expensive_arg;
5209	rtx replacements[2];
5210	int j;
5211
5212	/* Most arguments are cheap, so handle them specially.  */
5213	switch (GET_CODE (arg))
5214	  {
5215	  case REG:
5216	    /* This is the same as calling equiv_constant; it is duplicated
5217	       here for speed.  */
5218	    if (REGNO_QTY_VALID_P (REGNO (arg))
5219		&& qty_const[reg_qty[REGNO (arg)]] != 0
5220		&& GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != REG
5221		&& GET_CODE (qty_const[reg_qty[REGNO (arg)]]) != PLUS)
5222	      const_arg
5223		= gen_lowpart_if_possible (GET_MODE (arg),
5224					   qty_const[reg_qty[REGNO (arg)]]);
5225	    break;
5226
5227	  case CONST:
5228	  case CONST_INT:
5229	  case SYMBOL_REF:
5230	  case LABEL_REF:
5231	  case CONST_DOUBLE:
5232	    const_arg = arg;
5233	    break;
5234
5235#ifdef HAVE_cc0
5236	  case CC0:
5237	    folded_arg = prev_insn_cc0;
5238	    mode_arg = prev_insn_cc0_mode;
5239	    const_arg = equiv_constant (folded_arg);
5240	    break;
5241#endif
5242
5243	  default:
5244	    folded_arg = fold_rtx (arg, insn);
5245	    const_arg = equiv_constant (folded_arg);
5246	  }
5247
5248	/* For the first three operands, see if the operand
5249	   is constant or equivalent to a constant.  */
5250	switch (i)
5251	  {
5252	  case 0:
5253	    folded_arg0 = folded_arg;
5254	    const_arg0 = const_arg;
5255	    mode_arg0 = mode_arg;
5256	    break;
5257	  case 1:
5258	    folded_arg1 = folded_arg;
5259	    const_arg1 = const_arg;
5260	    break;
5261	  case 2:
5262	    const_arg2 = const_arg;
5263	    break;
5264	  }
5265
5266	/* Pick the least expensive of the folded argument and an
5267	   equivalent constant argument.  */
5268	if (const_arg == 0 || const_arg == folded_arg
5269	    || COST (const_arg) > COST (folded_arg))
5270	  cheap_arg = folded_arg, expensive_arg = const_arg;
5271	else
5272	  cheap_arg = const_arg, expensive_arg = folded_arg;
5273
5274	/* Try to replace the operand with the cheapest of the two
5275	   possibilities.  If it doesn't work and this is either of the first
5276	   two operands of a commutative operation, try swapping them.
5277	   If THAT fails, try the more expensive, provided it is cheaper
5278	   than what is already there.  */
5279
5280	if (cheap_arg == XEXP (x, i))
5281	  continue;
5282
5283	if (insn == 0 && ! copied)
5284	  {
5285	    x = copy_rtx (x);
5286	    copied = 1;
5287	  }
5288
5289	replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5290	for (j = 0;
5291	     j < 2 && replacements[j]
5292	     && COST (replacements[j]) < COST (XEXP (x, i));
5293	     j++)
5294	  {
5295	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5296	      break;
5297
5298	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5299	      {
5300		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5301		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5302
5303		if (apply_change_group ())
5304		  {
5305		    /* Swap them back to be invalid so that this loop can
5306		       continue and flag them to be swapped back later.  */
5307		    rtx tem;
5308
5309		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5310				       XEXP (x, 1) = tem;
5311		    must_swap = 1;
5312		    break;
5313		  }
5314	      }
5315	  }
5316      }
5317
5318    else
5319      {
5320	if (fmt[i] == 'E')
5321	  /* Don't try to fold inside of a vector of expressions.
5322	     Doing nothing is harmless.  */
5323	  {;}
5324      }
5325
5326  /* If a commutative operation, place a constant integer as the second
5327     operand unless the first operand is also a constant integer.  Otherwise,
5328     place any constant second unless the first operand is also a constant.  */
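  /* E.g., (plus (const_int 4) (reg)) is rewritten here as
     (plus (reg) (const_int 4)).  */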
5329
5330  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5331    {
5332      if (must_swap || (const_arg0
5333	  		&& (const_arg1 == 0
5334	      		    || (GET_CODE (const_arg0) == CONST_INT
5335			        && GET_CODE (const_arg1) != CONST_INT))))
5336	{
5337	  register rtx tem = XEXP (x, 0);
5338
5339	  if (insn == 0 && ! copied)
5340	    {
5341	      x = copy_rtx (x);
5342	      copied = 1;
5343	    }
5344
5345	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5346	  validate_change (insn, &XEXP (x, 1), tem, 1);
5347	  if (apply_change_group ())
5348	    {
5349	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5350	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5351	    }
5352	}
5353    }
5354
5355  /* If X is an arithmetic operation, see if we can simplify it.  */
5356
5357  switch (GET_RTX_CLASS (code))
5358    {
5359    case '1':
5360      {
5361	int is_const = 0;
5362
5363	/* We can't simplify extension ops unless we know the
5364	   original mode.  */
5365	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5366	    && mode_arg0 == VOIDmode)
5367	  break;
5368
5369	/* If we had a CONST, strip it off and put it back later if we
5370	   fold.  */
5371	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5372	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5373
5374	new = simplify_unary_operation (code, mode,
5375					const_arg0 ? const_arg0 : folded_arg0,
5376					mode_arg0);
5377	if (new != 0 && is_const)
5378	  new = gen_rtx_CONST (mode, new);
5379      }
5380      break;
5381
5382    case '<':
5383      /* See what items are actually being compared and set FOLDED_ARG[01]
5384	 to those values and CODE to the actual comparison code.  If any are
5385	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
5386	 do anything if both operands are already known to be constant.  */
5387
5388      if (const_arg0 == 0 || const_arg1 == 0)
5389	{
5390	  struct table_elt *p0, *p1;
5391	  rtx true = const_true_rtx, false = const0_rtx;
5392	  enum machine_mode mode_arg1;
5393
5394#ifdef FLOAT_STORE_FLAG_VALUE
5395	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5396	    {
5397	      true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5398						   mode);
5399	      false = CONST0_RTX (mode);
5400	    }
5401#endif
5402
5403	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5404				       &mode_arg0, &mode_arg1);
5405	  const_arg0 = equiv_constant (folded_arg0);
5406	  const_arg1 = equiv_constant (folded_arg1);
5407
5408	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5409	     what kinds of things are being compared, so we can't do
5410	     anything with this comparison.  */
5411
5412	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5413	    break;
5414
5415	  /* If we do not now have two constants being compared, see
5416	     if we can nevertheless deduce some things about the
5417	     comparison.  */
5418	  if (const_arg0 == 0 || const_arg1 == 0)
5419	    {
	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or a
		 non-explicit constant?  These aren't zero, but we
5422		 don't know their sign.  */
5423	      if (const_arg1 == const0_rtx
5424		  && (NONZERO_BASE_PLUS_P (folded_arg0)
5425#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5426	  come out as 0.  */
5427		      || GET_CODE (folded_arg0) == SYMBOL_REF
5428#endif
5429		      || GET_CODE (folded_arg0) == LABEL_REF
5430		      || GET_CODE (folded_arg0) == CONST))
5431		{
5432		  if (code == EQ)
5433		    return false;
5434		  else if (code == NE)
5435		    return true;
5436		}
5437
5438	      /* See if the two operands are the same.  We don't do this
5439		 for IEEE floating-point since we can't assume x == x
5440		 since x might be a NaN.  */
5441
5442	      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5443		   || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5444		  && (folded_arg0 == folded_arg1
5445		      || (GET_CODE (folded_arg0) == REG
5446			  && GET_CODE (folded_arg1) == REG
5447			  && (reg_qty[REGNO (folded_arg0)]
5448			      == reg_qty[REGNO (folded_arg1)]))
5449		      || ((p0 = lookup (folded_arg0,
5450					(safe_hash (folded_arg0, mode_arg0)
5451					 % NBUCKETS), mode_arg0))
5452			  && (p1 = lookup (folded_arg1,
5453					   (safe_hash (folded_arg1, mode_arg0)
5454					    % NBUCKETS), mode_arg0))
5455			  && p0->first_same_value == p1->first_same_value)))
5456		return ((code == EQ || code == LE || code == GE
5457			 || code == LEU || code == GEU)
5458			? true : false);
5459
5460	      /* If FOLDED_ARG0 is a register, see if the comparison we are
5461		 doing now is either the same as we did before or the reverse
5462		 (we only check the reverse if not floating-point).  */
5463	      else if (GET_CODE (folded_arg0) == REG)
5464		{
5465		  int qty = reg_qty[REGNO (folded_arg0)];
5466
5467		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5468		      && (comparison_dominates_p (qty_comparison_code[qty], code)
5469			  || (comparison_dominates_p (qty_comparison_code[qty],
5470						      reverse_condition (code))
5471			      && ! FLOAT_MODE_P (mode_arg0)))
5472		      && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5473			  || (const_arg1
5474			      && rtx_equal_p (qty_comparison_const[qty],
5475					      const_arg1))
5476			  || (GET_CODE (folded_arg1) == REG
5477			      && (reg_qty[REGNO (folded_arg1)]
5478				  == qty_comparison_qty[qty]))))
5479		    return (comparison_dominates_p (qty_comparison_code[qty],
5480						    code)
5481			    ? true : false);
5482		}
5483	    }
5484	}
5485
5486      /* If we are comparing against zero, see if the first operand is
5487	 equivalent to an IOR with a constant.  If so, we may be able to
5488	 determine the result of this comparison.  */
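      /* E.g., if FOLDED_ARG0 is known to be (ior y (const_int 4)), it
	 cannot be zero, so (eq FOLDED_ARG0 0) folds to false and
	 (ne FOLDED_ARG0 0) folds to true.  */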
5489
5490      if (const_arg1 == const0_rtx)
5491	{
5492	  rtx y = lookup_as_function (folded_arg0, IOR);
5493	  rtx inner_const;
5494
5495	  if (y != 0
5496	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5497	      && GET_CODE (inner_const) == CONST_INT
5498	      && INTVAL (inner_const) != 0)
5499	    {
5500	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5501	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5502			      && (INTVAL (inner_const)
5503				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5504	      rtx true = const_true_rtx, false = const0_rtx;
5505
5506#ifdef FLOAT_STORE_FLAG_VALUE
5507	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5508		{
5509		  true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5510						       mode);
5511		  false = CONST0_RTX (mode);
5512		}
5513#endif
5514
5515	      switch (code)
5516		{
5517		case EQ:
5518		  return false;
5519		case NE:
5520		  return true;
5521		case LT:  case LE:
5522		  if (has_sign)
5523		    return true;
5524		  break;
5525		case GT:  case GE:
5526		  if (has_sign)
5527		    return false;
5528		  break;
5529		default:
5530		  break;
5531		}
5532	    }
5533	}
5534
5535      new = simplify_relational_operation (code, mode_arg0,
5536					   const_arg0 ? const_arg0 : folded_arg0,
5537					   const_arg1 ? const_arg1 : folded_arg1);
5538#ifdef FLOAT_STORE_FLAG_VALUE
5539      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5540	new = ((new == const0_rtx) ? CONST0_RTX (mode)
5541	       : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5542#endif
5543      break;
5544
5545    case '2':
5546    case 'c':
5547      switch (code)
5548	{
5549	case PLUS:
5550	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
5551	     with that LABEL_REF as its second operand.  If so, the result is
5552	     the first operand of that MINUS.  This handles switches with an
5553	     ADDR_DIFF_VEC table.  */
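	  /* E.g., (plus (minus (label_ref L2) (label_ref L1))
	     (label_ref L1)) folds to (label_ref L2).  */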
5554	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5555	    {
5556	      rtx y
5557		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
5558		  : lookup_as_function (folded_arg0, MINUS);
5559
5560	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5561		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5562		return XEXP (y, 0);
5563
5564	      /* Now try for a CONST of a MINUS like the above.  */
5565	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5566			: lookup_as_function (folded_arg0, CONST))) != 0
5567		  && GET_CODE (XEXP (y, 0)) == MINUS
5568		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
5570		return XEXP (XEXP (y, 0), 0);
5571	    }
5572
5573	  /* Likewise if the operands are in the other order.  */
5574	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5575	    {
5576	      rtx y
5577		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
5578		  : lookup_as_function (folded_arg1, MINUS);
5579
5580	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5581		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5582		return XEXP (y, 0);
5583
5584	      /* Now try for a CONST of a MINUS like the above.  */
5585	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5586			: lookup_as_function (folded_arg1, CONST))) != 0
5587		  && GET_CODE (XEXP (y, 0)) == MINUS
5588		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
5590		return XEXP (XEXP (y, 0), 0);
5591	    }
5592
5593	  /* If second operand is a register equivalent to a negative
5594	     CONST_INT, see if we can find a register equivalent to the
5595	     positive constant.  Make a MINUS if so.  Don't do this for
5596	     a non-negative constant since we might then alternate between
	     choosing positive and negative constants.  Having the positive
5598	     constant previously-used is the more common case.  Be sure
5599	     the resulting constant is non-negative; if const_arg1 were
5600	     the smallest negative number this would overflow: depending
5601	     on the mode, this would either just be the same value (and
5602	     hence not save anything) or be incorrect.  */
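	  /* E.g., (plus x r1), where R1 is equivalent to (const_int -4),
	     becomes (minus x r2) if some register R2 is known to hold
	     (const_int 4).  */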
5603	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5604	      && INTVAL (const_arg1) < 0
5605	      && - INTVAL (const_arg1) >= 0
5606	      && GET_CODE (folded_arg1) == REG)
5607	    {
5608	      rtx new_const = GEN_INT (- INTVAL (const_arg1));
5609	      struct table_elt *p
5610		= lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5611			  mode);
5612
5613	      if (p)
5614		for (p = p->first_same_value; p; p = p->next_same_value)
5615		  if (GET_CODE (p->exp) == REG)
5616		    return cse_gen_binary (MINUS, mode, folded_arg0,
5617					   canon_reg (p->exp, NULL_RTX));
5618	    }
5619	  goto from_plus;
5620
5621	case MINUS:
5622	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5623	     If so, produce (PLUS Z C2-C).  */
5624	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5625	    {
5626	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5627	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5628		return fold_rtx (plus_constant (copy_rtx (y),
5629						-INTVAL (const_arg1)),
5630				 NULL_RTX);
5631	    }
5632
5633	  /* ... fall through ...  */
5634
5635	from_plus:
5636	case SMIN:    case SMAX:      case UMIN:    case UMAX:
5637	case IOR:     case AND:       case XOR:
5638	case MULT:    case DIV:       case UDIV:
5639	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
5640	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5641	     is known to be of similar form, we may be able to replace the
5642	     operation with a combined operation.  This may eliminate the
5643	     intermediate operation if every use is simplified in this way.
5644	     Note that the similar optimization done by combine.c only works
5645	     if the intermediate operation's result has only one reference.  */
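	  /* E.g., (plus r 3), where R is known to be (plus x 2),
	     can be combined into (plus x 5).  */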
5646
5647	  if (GET_CODE (folded_arg0) == REG
5648	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5649	    {
5650	      int is_shift
5651		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5652	      rtx y = lookup_as_function (folded_arg0, code);
5653	      rtx inner_const;
5654	      enum rtx_code associate_code;
5655	      rtx new_const;
5656
5657	      if (y == 0
5658		  || 0 == (inner_const
5659			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5660		  || GET_CODE (inner_const) != CONST_INT
5661		  /* If we have compiled a statement like
5662		     "if (x == (x & mask1))", and now are looking at
5663		     "x & mask2", we will have a case where the first operand
5664		     of Y is the same as our first operand.  Unless we detect
5665		     this case, an infinite loop will result.  */
5666		  || XEXP (y, 0) == folded_arg0)
5667		break;
5668
5669	      /* Don't associate these operations if they are a PLUS with the
5670		 same constant and it is a power of two.  These might be doable
5671		 with a pre- or post-increment.  Similarly for two subtracts of
5672		 identical powers of two with post decrement.  */
5673
5674	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5675		  && (0
5676#if defined(HAVE_PRE_INCREMENT) || defined(HAVE_POST_INCREMENT)
5677		      || exact_log2 (INTVAL (const_arg1)) >= 0
5678#endif
5679#if defined(HAVE_PRE_DECREMENT) || defined(HAVE_POST_DECREMENT)
5680		      || exact_log2 (- INTVAL (const_arg1)) >= 0
5681#endif
5682		  ))
5683		break;
5684
5685	      /* Compute the code used to compose the constants.  For example,
5686		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
5687
5688	      associate_code
5689		= (code == MULT || code == DIV || code == UDIV ? MULT
5690		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5691
5692	      new_const = simplify_binary_operation (associate_code, mode,
5693						     const_arg1, inner_const);
5694
5695	      if (new_const == 0)
5696		break;
5697
5698	      /* If we are associating shift operations, don't let this
5699		 produce a shift of the size of the object or larger.
5700		 This could occur when we follow a sign-extend by a right
5701		 shift on a machine that does a sign-extend as a pair
5702		 of shifts.  */
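	      /* E.g., in SImode, (ashiftrt r 8) with R known to be
		 (ashiftrt x 24) would combine to a shift of 32 bits;
		 for ASHIFTRT we use a shift of 31 instead.  */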
5703
5704	      if (is_shift && GET_CODE (new_const) == CONST_INT
5705		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5706		{
5707		  /* As an exception, we can turn an ASHIFTRT of this
5708		     form into a shift of the number of bits - 1.  */
5709		  if (code == ASHIFTRT)
5710		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5711		  else
5712		    break;
5713		}
5714
5715	      y = copy_rtx (XEXP (y, 0));
5716
5717	      /* If Y contains our first operand (the most common way this
		 can happen is if Y is a MEM), we would go into an infinite
5719		 loop if we tried to fold it.  So don't in that case.  */
5720
5721	      if (! reg_mentioned_p (folded_arg0, y))
5722		y = fold_rtx (y, insn);
5723
5724	      return cse_gen_binary (code, mode, y, new_const);
5725	    }
5726	  break;
5727
5728	default:
5729	  break;
5730	}
5731
5732      new = simplify_binary_operation (code, mode,
5733				       const_arg0 ? const_arg0 : folded_arg0,
5734				       const_arg1 ? const_arg1 : folded_arg1);
5735      break;
5736
5737    case 'o':
5738      /* (lo_sum (high X) X) is simply X.  */
5739      if (code == LO_SUM && const_arg0 != 0
5740	  && GET_CODE (const_arg0) == HIGH
5741	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
5742	return const_arg1;
5743      break;
5744
5745    case '3':
5746    case 'b':
5747      new = simplify_ternary_operation (code, mode, mode_arg0,
5748					const_arg0 ? const_arg0 : folded_arg0,
5749					const_arg1 ? const_arg1 : folded_arg1,
5750					const_arg2 ? const_arg2 : XEXP (x, 2));
5751      break;
5752
5753    case 'x':
      /* Always eliminate CONSTANT_P_RTX at this stage.  */
5755      if (code == CONSTANT_P_RTX)
5756	return (const_arg0 ? const1_rtx : const0_rtx);
5757      break;
5758    }
5759
5760  return new ? new : x;
5761}
5762
5763/* Return a constant value currently equivalent to X.
5764   Return 0 if we don't know one.  */
5765
5766static rtx
5767equiv_constant (x)
5768     rtx x;
5769{
5770  if (GET_CODE (x) == REG
5771      && REGNO_QTY_VALID_P (REGNO (x))
5772      && qty_const[reg_qty[REGNO (x)]])
5773    x = gen_lowpart_if_possible (GET_MODE (x), qty_const[reg_qty[REGNO (x)]]);
5774
5775  if (x != 0 && CONSTANT_P (x))
5776    return x;
5777
5778  /* If X is a MEM, try to fold it outside the context of any insn to see if
5779     it might be equivalent to a constant.  That handles the case where it
5780     is a constant-pool reference.  Then try to look it up in the hash table
5781     in case it is something whose value we have seen before.  */
5782
5783  if (GET_CODE (x) == MEM)
5784    {
5785      struct table_elt *elt;
5786
5787      x = fold_rtx (x, NULL_RTX);
5788      if (CONSTANT_P (x))
5789	return x;
5790
5791      elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
5792      if (elt == 0)
5793	return 0;
5794
5795      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
5796	if (elt->is_const && CONSTANT_P (elt->exp))
5797	  return elt->exp;
5798    }
5799
5800  return 0;
5801}
5802
5803/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
5804   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
5805   least-significant part of X.
5806   MODE specifies how big a part of X to return.
5807
5808   If the requested operation cannot be done, 0 is returned.
5809
5810   This is similar to gen_lowpart in emit-rtl.c.  */
5811
5812rtx
5813gen_lowpart_if_possible (mode, x)
5814     enum machine_mode mode;
5815     register rtx x;
5816{
5817  rtx result = gen_lowpart_common (mode, x);
5818
5819  if (result)
5820    return result;
5821  else if (GET_CODE (x) == MEM)
5822    {
5823      /* This is the only other case we handle.  */
5824      register int offset = 0;
5825      rtx new;
5826
5827      if (WORDS_BIG_ENDIAN)
5828	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
5829		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
5830      if (BYTES_BIG_ENDIAN)
5831	/* Adjust the address so that the address-after-the-data is
5832	   unchanged.  */
5833	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
5834		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
5835      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
5836      if (! memory_address_p (mode, XEXP (new, 0)))
5837	return 0;
5838      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
5839      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
5840      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
5841      return new;
5842    }
5843  else
5844    return 0;
5845}
5846
5847/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
5848   branch.  It will be zero if not.
5849
5850   In certain cases, this can cause us to add an equivalence.  For example,
5851   if we are following the taken case of
5852   	if (i == 2)
5853   we can add the fact that `i' and '2' are now equivalent.
5854
5855   In any case, we can record that this comparison was passed.  If the same
5856   comparison is seen later, we will know its value.  */
5857
5858static void
5859record_jump_equiv (insn, taken)
5860     rtx insn;
5861     int taken;
5862{
5863  int cond_known_true;
5864  rtx op0, op1;
5865  enum machine_mode mode, mode0, mode1;
5866  int reversed_nonequality = 0;
5867  enum rtx_code code;
5868
5869  /* Ensure this is the right kind of insn.  */
5870  if (! condjump_p (insn) || simplejump_p (insn))
5871    return;
5872
5873  /* See if this jump condition is known true or false.  */
5874  if (taken)
5875    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
5876  else
5877    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
5878
5879  /* Get the type of comparison being done and the operands being compared.
5880     If we had to reverse a non-equality condition, record that fact so we
5881     know that it isn't valid for floating-point.  */
5882  code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
5883  op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
5884  op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
5885
5886  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
5887  if (! cond_known_true)
5888    {
5889      reversed_nonequality = (code != EQ && code != NE);
5890      code = reverse_condition (code);
5891    }
5892
5893  /* The mode is the mode of the non-constant.  */
5894  mode = mode0;
5895  if (mode1 != VOIDmode)
5896    mode = mode1;
5897
5898  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
5899}
5900
5901/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
5902   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
5903   Make any useful entries we can with that information.  Called from
5904   above function and called recursively.  */
5905
5906static void
5907record_jump_cond (code, mode, op0, op1, reversed_nonequality)
5908     enum rtx_code code;
5909     enum machine_mode mode;
5910     rtx op0, op1;
5911     int reversed_nonequality;
5912{
5913  unsigned op0_hash, op1_hash;
5914  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
5915  struct table_elt *op0_elt, *op1_elt;
5916
5917  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
5918     we know that they are also equal in the smaller mode (this is also
     true for all smaller modes whether or not there is a SUBREG, but
     it is not worth testing when there is no SUBREG).  */
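  /* E.g., if (subreg:DI (reg:SI 100) 0) is known equal to OP1 in DImode,
     then (reg:SI 100) also equals the low SImode part of OP1, and we
     record that narrower equivalence as well.  */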
5921
5922  /* Note that GET_MODE (op0) may not equal MODE.  */
5923  if (code == EQ && GET_CODE (op0) == SUBREG
5924      && (GET_MODE_SIZE (GET_MODE (op0))
5925	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5926    {
5927      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5928      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5929
5930      record_jump_cond (code, mode, SUBREG_REG (op0),
5931			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
5932			reversed_nonequality);
5933    }
5934
5935  if (code == EQ && GET_CODE (op1) == SUBREG
5936      && (GET_MODE_SIZE (GET_MODE (op1))
5937	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5938    {
5939      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5940      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5941
5942      record_jump_cond (code, mode, SUBREG_REG (op1),
5943			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
5944			reversed_nonequality);
5945    }
5946
5947  /* Similarly, if this is an NE comparison, and either is a SUBREG
5948     making a smaller mode, we know the whole thing is also NE.  */
5949
5950  /* Note that GET_MODE (op0) may not equal MODE;
5951     if we test MODE instead, we can get an infinite recursion
5952     alternating between two modes each wider than MODE.  */
5953
5954  if (code == NE && GET_CODE (op0) == SUBREG
5955      && subreg_lowpart_p (op0)
5956      && (GET_MODE_SIZE (GET_MODE (op0))
5957	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
5958    {
5959      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
5960      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
5961
5962      record_jump_cond (code, mode, SUBREG_REG (op0),
5963			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
5964			reversed_nonequality);
5965    }
5966
5967  if (code == NE && GET_CODE (op1) == SUBREG
5968      && subreg_lowpart_p (op1)
5969      && (GET_MODE_SIZE (GET_MODE (op1))
5970	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
5971    {
5972      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
5973      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
5974
5975      record_jump_cond (code, mode, SUBREG_REG (op1),
5976			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
5977			reversed_nonequality);
5978    }
5979
5980  /* Hash both operands.  */
5981
5982  do_not_record = 0;
5983  hash_arg_in_memory = 0;
5984  hash_arg_in_struct = 0;
5985  op0_hash = HASH (op0, mode);
5986  op0_in_memory = hash_arg_in_memory;
5987  op0_in_struct = hash_arg_in_struct;
5988
5989  if (do_not_record)
5990    return;
5991
5992  do_not_record = 0;
5993  hash_arg_in_memory = 0;
5994  hash_arg_in_struct = 0;
5995  op1_hash = HASH (op1, mode);
5996  op1_in_memory = hash_arg_in_memory;
5997  op1_in_struct = hash_arg_in_struct;
5998
5999  if (do_not_record)
6000    return;
6001
6002  /* Look up both operands.  */
6003  op0_elt = lookup (op0, op0_hash, mode);
6004  op1_elt = lookup (op1, op1_hash, mode);
6005
6006  /* If both operands are already equivalent or if they are not in the
6007     table but are identical, do nothing.  */
6008  if ((op0_elt != 0 && op1_elt != 0
6009       && op0_elt->first_same_value == op1_elt->first_same_value)
6010      || op0 == op1 || rtx_equal_p (op0, op1))
6011    return;
6012
  /* If we aren't setting two things equal, all we can do is save this
     comparison.  Similarly if this is floating-point.  In the latter
6015     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6016     If we record the equality, we might inadvertently delete code
6017     whose intent was to change -0 to +0.  */
6018
6019  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6020    {
6021      /* If we reversed a floating-point comparison, if OP0 is not a
	 register, or if OP1 is neither a register nor a constant, we can't
6023	 do anything.  */
6024
6025      if (GET_CODE (op1) != REG)
6026	op1 = equiv_constant (op1);
6027
6028      if ((reversed_nonequality && FLOAT_MODE_P (mode))
6029	  || GET_CODE (op0) != REG || op1 == 0)
6030	return;
6031
6032      /* Put OP0 in the hash table if it isn't already.  This gives it a
6033	 new quantity number.  */
6034      if (op0_elt == 0)
6035	{
6036	  if (insert_regs (op0, NULL_PTR, 0))
6037	    {
6038	      rehash_using_reg (op0);
6039	      op0_hash = HASH (op0, mode);
6040
6041	      /* If OP0 is contained in OP1, this changes its hash code
6042		 as well.  Faster to rehash than to check, except
6043		 for the simple case of a constant.  */
6044	      if (! CONSTANT_P (op1))
		op1_hash = HASH (op1, mode);
6046	    }
6047
6048	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6049	  op0_elt->in_memory = op0_in_memory;
6050	  op0_elt->in_struct = op0_in_struct;
6051	}
6052
6053      qty_comparison_code[reg_qty[REGNO (op0)]] = code;
6054      if (GET_CODE (op1) == REG)
6055	{
6056	  /* Look it up again--in case op0 and op1 are the same.  */
6057	  op1_elt = lookup (op1, op1_hash, mode);
6058
6059	  /* Put OP1 in the hash table so it gets a new quantity number.  */
6060	  if (op1_elt == 0)
6061	    {
6062	      if (insert_regs (op1, NULL_PTR, 0))
6063		{
6064		  rehash_using_reg (op1);
6065		  op1_hash = HASH (op1, mode);
6066		}
6067
6068	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6069	      op1_elt->in_memory = op1_in_memory;
6070	      op1_elt->in_struct = op1_in_struct;
6071	    }
6072
6073	  qty_comparison_qty[reg_qty[REGNO (op0)]] = reg_qty[REGNO (op1)];
6074	  qty_comparison_const[reg_qty[REGNO (op0)]] = 0;
6075	}
6076      else
6077	{
6078	  qty_comparison_qty[reg_qty[REGNO (op0)]] = -1;
6079	  qty_comparison_const[reg_qty[REGNO (op0)]] = op1;
6080	}
6081
6082      return;
6083    }
6084
6085  /* If either side is still missing an equivalence, make it now,
6086     then merge the equivalences.  */
6087
6088  if (op0_elt == 0)
6089    {
6090      if (insert_regs (op0, NULL_PTR, 0))
6091	{
6092	  rehash_using_reg (op0);
6093	  op0_hash = HASH (op0, mode);
6094	}
6095
6096      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6097      op0_elt->in_memory = op0_in_memory;
6098      op0_elt->in_struct = op0_in_struct;
6099    }
6100
6101  if (op1_elt == 0)
6102    {
6103      if (insert_regs (op1, NULL_PTR, 0))
6104	{
6105	  rehash_using_reg (op1);
6106	  op1_hash = HASH (op1, mode);
6107	}
6108
6109      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6110      op1_elt->in_memory = op1_in_memory;
6111      op1_elt->in_struct = op1_in_struct;
6112    }
6113
6114  merge_equiv_classes (op0_elt, op1_elt);
6115  last_jump_equiv_class = op0_elt;
6116}
6117
6118/* CSE processing for one instruction.
6119   First simplify sources and addresses of all assignments
   in the instruction, using previously-computed equivalent values.
6121   Then install the new sources and destinations in the table
6122   of available values.
6123
6124   If LIBCALL_INSN is nonzero, don't record any equivalence made in
   the insn.  This means that INSN is inside a libcall block.  In this
   case, LIBCALL_INSN is the corresponding insn with the REG_LIBCALL note.  */
6127
6128/* Data on one SET contained in the instruction.  */
6129
6130struct set
6131{
6132  /* The SET rtx itself.  */
6133  rtx rtl;
6134  /* The SET_SRC of the rtx (the original value, if it is changing).  */
6135  rtx src;
6136  /* The hash-table element for the SET_SRC of the SET.  */
6137  struct table_elt *src_elt;
6138  /* Hash value for the SET_SRC.  */
6139  unsigned src_hash;
6140  /* Hash value for the SET_DEST.  */
6141  unsigned dest_hash;
6142  /* The SET_DEST, with SUBREG, etc., stripped.  */
6143  rtx inner_dest;
6144  /* Place where the pointer to the INNER_DEST was found.  */
6145  rtx *inner_dest_loc;
6146  /* Nonzero if the SET_SRC is in memory.  */
6147  char src_in_memory;
6148  /* Nonzero if the SET_SRC is in a structure.  */
6149  char src_in_struct;
6150  /* Nonzero if the SET_SRC contains something
6151     whose value cannot be predicted and understood.  */
6152  char src_volatile;
6153  /* Original machine mode, in case it becomes a CONST_INT.  */
6154  enum machine_mode mode;
6155  /* A constant equivalent for SET_SRC, if any.  */
6156  rtx src_const;
6157  /* Hash value of constant equivalent for SET_SRC.  */
6158  unsigned src_const_hash;
6159  /* Table entry for constant equivalent for SET_SRC, if any.  */
6160  struct table_elt *src_const_elt;
6161};
6162
6163static void
6164cse_insn (insn, libcall_insn)
6165     rtx insn;
6166     rtx libcall_insn;
6167{
6168  register rtx x = PATTERN (insn);
6169  register int i;
6170  rtx tem;
6171  register int n_sets = 0;
6172
6173#ifdef HAVE_cc0
6174  /* Records what this insn does to set CC0.  */
6175  rtx this_insn_cc0 = 0;
6176  enum machine_mode this_insn_cc0_mode = VOIDmode;
6177#endif
6178
6179  rtx src_eqv = 0;
6180  struct table_elt *src_eqv_elt = 0;
6181  int src_eqv_volatile;
6182  int src_eqv_in_memory;
6183  int src_eqv_in_struct;
6184  unsigned src_eqv_hash;
6185
6186  struct set *sets;
6187
6188  this_insn = insn;
6189
6190  /* Find all the SETs and CLOBBERs in this instruction.
6191     Record all the SETs in the array `set' and count them.
6192     Also determine whether there is a CLOBBER that invalidates
6193     all memory references, or all references at varying addresses.  */
6194
6195  if (GET_CODE (insn) == CALL_INSN)
6196    {
6197      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6198	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6199          invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6200    }
6201
6202  if (GET_CODE (x) == SET)
6203    {
6204      sets = (struct set *) alloca (sizeof (struct set));
6205      sets[0].rtl = x;
6206
6207      /* Ignore SETs that are unconditional jumps.
6208	 They never need cse processing, so this does not hurt.
6209	 The reason is not efficiency but rather
6210	 so that we can test at the end for instructions
6211	 that have been simplified to unconditional jumps
6212	 and not be misled by unchanged instructions
6213	 that were unconditional jumps to begin with.  */
6214      if (SET_DEST (x) == pc_rtx
6215	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
6216	;
6217
6218      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6219	 The hard function value register is used only once, to copy to
6220	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6221	 Ensure we invalidate the destination register.  On the 80386 no
6222	 other code would invalidate it since it is a fixed_reg.
6223	 We need not check the return of apply_change_group; see canon_reg.  */
6224
6225      else if (GET_CODE (SET_SRC (x)) == CALL)
6226	{
6227	  canon_reg (SET_SRC (x), insn);
6228	  apply_change_group ();
6229	  fold_rtx (SET_SRC (x), insn);
6230	  invalidate (SET_DEST (x), VOIDmode);
6231	}
6232      else
6233	n_sets = 1;
6234    }
6235  else if (GET_CODE (x) == PARALLEL)
6236    {
6237      register int lim = XVECLEN (x, 0);
6238
6239      sets = (struct set *) alloca (lim * sizeof (struct set));
6240
6241      /* Find all regs explicitly clobbered in this insn,
6242	 and ensure they are not replaced with any other regs
6243	 elsewhere in this insn.
6244	 When a reg that is clobbered is also used for input,
6245	 we should presume that that is for a reason,
6246	 and we should not substitute some other register
6247	 which is not supposed to be clobbered.
6248	 Therefore, this loop cannot be merged into the one below
6249	 because a CALL may precede a CLOBBER and refer to the
6250	 value clobbered.  We must not let a canonicalization do
6251	 anything in that case.  */
6252      for (i = 0; i < lim; i++)
6253	{
6254	  register rtx y = XVECEXP (x, 0, i);
6255	  if (GET_CODE (y) == CLOBBER)
6256	    {
6257	      rtx clobbered = XEXP (y, 0);
6258
6259	      if (GET_CODE (clobbered) == REG
6260		  || GET_CODE (clobbered) == SUBREG)
6261		invalidate (clobbered, VOIDmode);
6262	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6263		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6264		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6265	    }
6266	}
6267
6268      for (i = 0; i < lim; i++)
6269	{
6270	  register rtx y = XVECEXP (x, 0, i);
6271	  if (GET_CODE (y) == SET)
6272	    {
6273	      /* As above, we ignore unconditional jumps and call-insns and
6274		 ignore the result of apply_change_group.  */
6275	      if (GET_CODE (SET_SRC (y)) == CALL)
6276		{
6277		  canon_reg (SET_SRC (y), insn);
6278		  apply_change_group ();
6279		  fold_rtx (SET_SRC (y), insn);
6280		  invalidate (SET_DEST (y), VOIDmode);
6281		}
6282	      else if (SET_DEST (y) == pc_rtx
6283		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
6284		;
6285	      else
6286		sets[n_sets++].rtl = y;
6287	    }
6288	  else if (GET_CODE (y) == CLOBBER)
6289	    {
6290	      /* If we clobber memory, canon the address.
6291		 This does nothing when a register is clobbered
6292		 because we have already invalidated the reg.  */
6293	      if (GET_CODE (XEXP (y, 0)) == MEM)
6294		canon_reg (XEXP (y, 0), NULL_RTX);
6295	    }
6296	  else if (GET_CODE (y) == USE
6297		   && ! (GET_CODE (XEXP (y, 0)) == REG
6298			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6299	    canon_reg (y, NULL_RTX);
6300	  else if (GET_CODE (y) == CALL)
6301	    {
6302	      /* The result of apply_change_group can be ignored; see
6303		 canon_reg.  */
6304	      canon_reg (y, insn);
6305	      apply_change_group ();
6306	      fold_rtx (y, insn);
6307	    }
6308	}
6309    }
6310  else if (GET_CODE (x) == CLOBBER)
6311    {
6312      if (GET_CODE (XEXP (x, 0)) == MEM)
6313	canon_reg (XEXP (x, 0), NULL_RTX);
6314    }
6315
6316  /* Canonicalize a USE of a pseudo register or memory location.  */
6317  else if (GET_CODE (x) == USE
6318	   && ! (GET_CODE (XEXP (x, 0)) == REG
6319		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6320    canon_reg (XEXP (x, 0), NULL_RTX);
6321  else if (GET_CODE (x) == CALL)
6322    {
6323      /* The result of apply_change_group can be ignored; see canon_reg.  */
6324      canon_reg (x, insn);
6325      apply_change_group ();
6326      fold_rtx (x, insn);
6327    }
6328
6329  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6330     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
6331     is handled specially for this case, and if it isn't set, then there will
6332     be no equivalence for the destination.  */
6333  if (n_sets == 1 && REG_NOTES (insn) != 0
6334      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6335      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6336	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6337    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6338
6339  /* Canonicalize sources and addresses of destinations.
6340     We do this in a separate pass to avoid problems when a MATCH_DUP is
6341     present in the insn pattern.  In that case, we want to ensure that
6342     we don't break the duplicate nature of the pattern.  So we will replace
6343     both operands at the same time.  Otherwise, we would fail to find an
6344     equivalent substitution in the loop calling validate_change below.
6345
6346     We used to suppress canonicalization of DEST if it appears in SRC,
6347     but we don't do this any more.  */
6348
6349  for (i = 0; i < n_sets; i++)
6350    {
6351      rtx dest = SET_DEST (sets[i].rtl);
6352      rtx src = SET_SRC (sets[i].rtl);
6353      rtx new = canon_reg (src, insn);
6354      int insn_code;
6355
6356      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6357	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6358	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6359	  || (insn_code = recog_memoized (insn)) < 0
6360	  || insn_n_dups[insn_code] > 0)
6361	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6362      else
6363	SET_SRC (sets[i].rtl) = new;
6364
6365      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6366	{
6367	  validate_change (insn, &XEXP (dest, 1),
6368			   canon_reg (XEXP (dest, 1), insn), 1);
6369	  validate_change (insn, &XEXP (dest, 2),
6370			   canon_reg (XEXP (dest, 2), insn), 1);
6371	}
6372
6373      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6374	     || GET_CODE (dest) == ZERO_EXTRACT
6375	     || GET_CODE (dest) == SIGN_EXTRACT)
6376	dest = XEXP (dest, 0);
6377
6378      if (GET_CODE (dest) == MEM)
6379	canon_reg (dest, insn);
6380    }
6381
6382  /* Now that we have done all the replacements, we can apply the change
6383     group and see if they all work.  Note that this will cause some
6384     canonicalizations that would have worked individually not to be applied
6385     because some other canonicalization didn't work, but this should not
6386     occur often.
6387
6388     The result of apply_change_group can be ignored; see canon_reg.  */
6389
6390  apply_change_group ();
6391
6392  /* Set sets[i].src_elt to the class each source belongs to.
6393     Detect assignments from or to volatile things
6394     and set set[i] to zero so they will be ignored
6395     in the rest of this function.
6396
6397     Nothing in this loop changes the hash table or the register chains.  */
6398
6399  for (i = 0; i < n_sets; i++)
6400    {
6401      register rtx src, dest;
6402      register rtx src_folded;
6403      register struct table_elt *elt = 0, *p;
6404      enum machine_mode mode;
6405      rtx src_eqv_here;
6406      rtx src_const = 0;
6407      rtx src_related = 0;
6408      struct table_elt *src_const_elt = 0;
6409      int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6410      int src_related_cost = 10000, src_elt_cost = 10000;
      /* Set non-zero if we need to call force_const_mem on the
6412	 contents of src_folded before using it.  */
6413      int src_folded_force_flag = 0;
6414
6415      dest = SET_DEST (sets[i].rtl);
6416      src = SET_SRC (sets[i].rtl);
6417
6418      /* If SRC is a constant that has no machine mode,
6419	 hash it with the destination's machine mode.
6420	 This way we can keep different modes separate.  */
6421
6422      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6423      sets[i].mode = mode;
6424
6425      if (src_eqv)
6426	{
6427	  enum machine_mode eqvmode = mode;
6428	  if (GET_CODE (dest) == STRICT_LOW_PART)
6429	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6430	  do_not_record = 0;
6431	  hash_arg_in_memory = 0;
6432	  hash_arg_in_struct = 0;
6433	  src_eqv = fold_rtx (src_eqv, insn);
6434	  src_eqv_hash = HASH (src_eqv, eqvmode);
6435
6436	  /* Find the equivalence class for the equivalent expression.  */
6437
6438	  if (!do_not_record)
6439	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6440
6441	  src_eqv_volatile = do_not_record;
6442	  src_eqv_in_memory = hash_arg_in_memory;
6443	  src_eqv_in_struct = hash_arg_in_struct;
6444	}
6445
6446      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6447	 value of the INNER register, not the destination.  So it is not
6448	 a valid substitution for the source.  But save it for later.  */
6449      if (GET_CODE (dest) == STRICT_LOW_PART)
6450	src_eqv_here = 0;
6451      else
6452	src_eqv_here = src_eqv;
6453
      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6455	 simplified result, which may not necessarily be valid.  */
6456      src_folded = fold_rtx (src, insn);
6457
6458#if 0
6459      /* ??? This caused bad code to be generated for the m68k port with -O2.
6460	 Suppose src is (CONST_INT -1), and that after truncation src_folded
6461	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
6462	 At the end we will add src and src_const to the same equivalence
6463	 class.  We now have 3 and -1 on the same equivalence class.  This
6464	 causes later instructions to be mis-optimized.  */
6465      /* If storing a constant in a bitfield, pre-truncate the constant
6466	 so we will be able to record it later.  */
6467      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6468	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6469	{
6470	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6471
6472	  if (GET_CODE (src) == CONST_INT
6473	      && GET_CODE (width) == CONST_INT
6474	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6475	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6476	    src_folded
6477	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6478					  << INTVAL (width)) - 1));
6479	}
6480#endif
6481
6482      /* Compute SRC's hash code, and also notice if it
6483	 should not be recorded at all.  In that case,
6484	 prevent any further processing of this assignment.  */
6485      do_not_record = 0;
6486      hash_arg_in_memory = 0;
6487      hash_arg_in_struct = 0;
6488
6489      sets[i].src = src;
6490      sets[i].src_hash = HASH (src, mode);
6491      sets[i].src_volatile = do_not_record;
6492      sets[i].src_in_memory = hash_arg_in_memory;
6493      sets[i].src_in_struct = hash_arg_in_struct;
6494
6495      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6496	 a pseudo that is set more than once, do not record SRC.  Using
6497	 SRC as a replacement for anything else will be incorrect in that
6498	 situation.  Note that this usually occurs only for stack slots,
6499	 in which case all the RTL would be referring to SRC, so we don't
6500	 lose any optimization opportunities by not having SRC in the
6501	 hash table.  */
6502
6503      if (GET_CODE (src) == MEM
6504	  && find_reg_note (insn, REG_EQUIV, src) != 0
6505	  && GET_CODE (dest) == REG
6506	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6507	  && REG_N_SETS (REGNO (dest)) != 1)
6508	sets[i].src_volatile = 1;
6509
6510#if 0
6511      /* It is no longer clear why we used to do this, but it doesn't
6512	 appear to still be needed.  So let's try without it since this
6513	 code hurts cse'ing widened ops.  */
6514      /* If source is a perverse subreg (such as QI treated as an SI),
6515	 treat it as volatile.  It may do the work of an SI in one context
6516	 where the extra bits are not being used, but cannot replace an SI
6517	 in general.  */
6518      if (GET_CODE (src) == SUBREG
6519	  && (GET_MODE_SIZE (GET_MODE (src))
6520	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6521	sets[i].src_volatile = 1;
6522#endif
6523
6524      /* Locate all possible equivalent forms for SRC.  Try to replace
6525         SRC in the insn with each cheaper equivalent.
6526
6527         We have the following types of equivalents: SRC itself, a folded
6528         version, a value given in a REG_EQUAL note, or a value related
6529	 to a constant.
6530
6531         Each of these equivalents may be part of an additional class
6532         of equivalents (if more than one is in the table, they must be in
6533         the same class; we check for this).
6534
6535	 If the source is volatile, we don't do any table lookups.
6536
6537         We note any constant equivalent for possible later use in a
6538         REG_NOTE.  */
6539
6540      if (!sets[i].src_volatile)
6541	elt = lookup (src, sets[i].src_hash, mode);
6542
6543      sets[i].src_elt = elt;
6544
6545      if (elt && src_eqv_here && src_eqv_elt)
6546        {
6547          if (elt->first_same_value != src_eqv_elt->first_same_value)
6548	    {
6549	      /* The REG_EQUAL is indicating that two formerly distinct
6550		 classes are now equivalent.  So merge them.  */
6551	      merge_equiv_classes (elt, src_eqv_elt);
6552	      src_eqv_hash = HASH (src_eqv, elt->mode);
6553	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6554	    }
6555
6556          src_eqv_here = 0;
6557        }
6558
6559      else if (src_eqv_elt)
6560        elt = src_eqv_elt;
6561
6562      /* Try to find a constant somewhere and record it in `src_const'.
6563	 Record its table element, if any, in `src_const_elt'.  Look in
6564	 any known equivalences first.  (If the constant is not in the
6565	 table, also set `sets[i].src_const_hash').  */
6566      if (elt)
6567        for (p = elt->first_same_value; p; p = p->next_same_value)
6568	  if (p->is_const)
6569	    {
6570	      src_const = p->exp;
6571	      src_const_elt = elt;
6572	      break;
6573	    }
6574
6575      if (src_const == 0
6576	  && (CONSTANT_P (src_folded)
6577	      /* Consider (minus (label_ref L1) (label_ref L2)) as
6578		 "constant" here so we will record it. This allows us
6579		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
6580	      || (GET_CODE (src_folded) == MINUS
6581		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6582		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6583	src_const = src_folded, src_const_elt = elt;
6584      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6585	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6586
6587      /* If we don't know if the constant is in the table, get its
6588	 hash code and look it up.  */
6589      if (src_const && src_const_elt == 0)
6590	{
6591	  sets[i].src_const_hash = HASH (src_const, mode);
6592	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6593	}
6594
6595      sets[i].src_const = src_const;
6596      sets[i].src_const_elt = src_const_elt;
6597
6598      /* If the constant and our source are both in the table, mark them as
6599	 equivalent.  Otherwise, if a constant is in the table but the source
6600	 isn't, set ELT to it.  */
6601      if (src_const_elt && elt
6602	  && src_const_elt->first_same_value != elt->first_same_value)
6603	merge_equiv_classes (elt, src_const_elt);
6604      else if (src_const_elt && elt == 0)
6605	elt = src_const_elt;
6606
6607      /* See if there is a register linearly related to a constant
6608         equivalent of SRC.  */
6609      if (src_const
6610	  && (GET_CODE (src_const) == CONST
6611	      || (src_const_elt && src_const_elt->related_value != 0)))
6612        {
6613          src_related = use_related_value (src_const, src_const_elt);
6614          if (src_related)
6615            {
6616	      struct table_elt *src_related_elt
6617		    = lookup (src_related, HASH (src_related, mode), mode);
6618	      if (src_related_elt && elt)
6619	        {
6620		  if (elt->first_same_value
6621		      != src_related_elt->first_same_value)
6622		    /* This can occur when we previously saw a CONST
6623		       involving a SYMBOL_REF and then see the SYMBOL_REF
6624		       twice.  Merge the involved classes.  */
6625		    merge_equiv_classes (elt, src_related_elt);
6626
6627	          src_related = 0;
6628		  src_related_elt = 0;
6629	        }
6630              else if (src_related_elt && elt == 0)
6631	        elt = src_related_elt;
6632	    }
6633        }
6634
6635      /* See if we have a CONST_INT that is already in a register in a
6636	 wider mode.  */
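      /* E.g., if (reg:SI 99) is known to hold (const_int 5) and we want
	 (const_int 5) in QImode, the low QImode part of that register may
	 be a cheaper source than a new constant load.  */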
6637
6638      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6639	  && GET_MODE_CLASS (mode) == MODE_INT
6640	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6641	{
6642	  enum machine_mode wider_mode;
6643
6644	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
6645	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6646	       && src_related == 0;
6647	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6648	    {
6649	      struct table_elt *const_elt
6650		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6651
6652	      if (const_elt == 0)
6653		continue;
6654
6655	      for (const_elt = const_elt->first_same_value;
6656		   const_elt; const_elt = const_elt->next_same_value)
6657		if (GET_CODE (const_elt->exp) == REG)
6658		  {
6659		    src_related = gen_lowpart_if_possible (mode,
6660							   const_elt->exp);
6661		    break;
6662		  }
6663	    }
6664	}
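      /* An illustrative instance (register number invented): if we need
	 (const_int 5) in HImode and the table records that (reg:SI 100)
	 currently holds (const_int 5), the loop above lets us use
	 (subreg:HI (reg:SI 100) 0) instead of loading the constant anew.  */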
6665
6666      /* Another possibility is that we have an AND with a constant in
6667	 a mode narrower than a word.  If so, it might have been generated
6668	 as part of an "if" which would narrow the AND.  If we already
6669	 have done the AND in a wider mode, we can use a SUBREG of that
6670	 value.  */
6671
6672      if (flag_expensive_optimizations && ! src_related
6673	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6674	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6675	{
6676	  enum machine_mode tmode;
6677	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6678
6679	  for (tmode = GET_MODE_WIDER_MODE (mode);
6680	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6681	       tmode = GET_MODE_WIDER_MODE (tmode))
6682	    {
6683	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6684	      struct table_elt *larger_elt;
6685
6686	      if (inner)
6687		{
6688		  PUT_MODE (new_and, tmode);
6689		  XEXP (new_and, 0) = inner;
6690		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6691		  if (larger_elt == 0)
6692		    continue;
6693
6694		  for (larger_elt = larger_elt->first_same_value;
6695		       larger_elt; larger_elt = larger_elt->next_same_value)
6696		    if (GET_CODE (larger_elt->exp) == REG)
6697		      {
6698			src_related
6699			  = gen_lowpart_if_possible (mode, larger_elt->exp);
6700			break;
6701		      }
6702
6703		  if (src_related)
6704		    break;
6705		}
6706	    }
6707	}
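      /* An illustrative example (modes and register invented): if SRC is
	 (and:QI X (const_int 15)) and the hash table shows that some
	 (reg:SI 101) holds (and:SI X' (const_int 15)), where X' is X viewed
	 in SImode, the loop above finds that entry and we can then use the
	 QImode lowpart of (reg:SI 101) in place of SRC.  */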
6708
6709#ifdef LOAD_EXTEND_OP
6710      /* See if a MEM has already been loaded with a widening operation;
6711	 if it has, we can use a subreg of that.  Many CISC machines
6712	 also have such operations, but this is only likely to be
6713	 beneficial on these machines.  */
6714
6715      if (flag_expensive_optimizations && src_related == 0
6716	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6717	  && GET_MODE_CLASS (mode) == MODE_INT
6718	  && GET_CODE (src) == MEM && ! do_not_record
6719	  && LOAD_EXTEND_OP (mode) != NIL)
6720	{
6721	  enum machine_mode tmode;
6722
6723	  /* Set what we are trying to extend and the operation it might
6724	     have been extended with.  */
6725	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6726	  XEXP (memory_extend_rtx, 0) = src;
6727
6728	  for (tmode = GET_MODE_WIDER_MODE (mode);
6729	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6730	       tmode = GET_MODE_WIDER_MODE (tmode))
6731	    {
6732	      struct table_elt *larger_elt;
6733
6734	      PUT_MODE (memory_extend_rtx, tmode);
6735	      larger_elt = lookup (memory_extend_rtx,
6736				   HASH (memory_extend_rtx, tmode), tmode);
6737	      if (larger_elt == 0)
6738		continue;
6739
6740	      for (larger_elt = larger_elt->first_same_value;
6741		   larger_elt; larger_elt = larger_elt->next_same_value)
6742		if (GET_CODE (larger_elt->exp) == REG)
6743		  {
6744		    src_related = gen_lowpart_if_possible (mode,
6745							   larger_elt->exp);
6746		    break;
6747		  }
6748
6749	      if (src_related)
6750		break;
6751	    }
6752	}
6753#endif /* LOAD_EXTEND_OP */
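      /* A hypothetical example: on a target where LOAD_EXTEND_OP (QImode)
	 is ZERO_EXTEND, if (zero_extend:SI (mem:QI A)) was already loaded
	 into (reg:SI 102), the loop above lets this QImode load of A use
	 (subreg:QI (reg:SI 102) 0) rather than reading memory again.  */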
6754
6755      if (src == src_folded)
6756        src_folded = 0;
6757
6758      /* At this point, ELT, if non-zero, points to a class of expressions
6759         equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
6760	 and SRC_RELATED, if non-zero, each contain additional equivalent
6761	 expressions.  Prune these latter expressions by deleting those
6762	 already in the equivalence class.
6763
6764	 Check for an equivalent identical to the destination.  If found,
6765	 this is the preferred equivalent since it will likely lead to
6766	 elimination of the insn.  Indicate this by placing it in
6767	 `src_related'.  */
6768
6769      if (elt) elt = elt->first_same_value;
6770      for (p = elt; p; p = p->next_same_value)
6771        {
6772	  enum rtx_code code = GET_CODE (p->exp);
6773
6774	  /* If the expression is not valid, ignore it.  Then we do not
6775	     have to check for validity below.  In most cases, we can use
6776	     `rtx_equal_p', since canonicalization has already been done.  */
6777	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
6778	    continue;
6779
6780	  /* Also skip paradoxical subregs, unless that's what we're
6781	     looking for.  */
6782	  if (code == SUBREG
6783	      && (GET_MODE_SIZE (GET_MODE (p->exp))
6784		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
6785	      && ! (src != 0
6786		    && GET_CODE (src) == SUBREG
6787		    && GET_MODE (src) == GET_MODE (p->exp)
6788		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6789			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
6790	    continue;
6791
6792          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
6793	    src = 0;
6794          else if (src_folded && GET_CODE (src_folded) == code
6795		   && rtx_equal_p (src_folded, p->exp))
6796	    src_folded = 0;
6797          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
6798		   && rtx_equal_p (src_eqv_here, p->exp))
6799	    src_eqv_here = 0;
6800          else if (src_related && GET_CODE (src_related) == code
6801		   && rtx_equal_p (src_related, p->exp))
6802	    src_related = 0;
6803
6804	  /* If this is the same as the destination of the insn, we want
6805	     to prefer it.  Copy it to src_related.  The code below will
6806	     then give it a negative cost.  */
6807	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
6808	    src_related = dest;
6809
6810        }
6811
6812      /* Find the cheapest valid equivalent, trying all the available
6813         possibilities.  Prefer items not in the hash table to ones
6814         that are, when the costs are equal.  Note that we can never
6815         worsen an insn as the current contents will also succeed.
6816	 If we find an equivalent identical to the destination, use it as best,
6817	 since this insn will probably be eliminated in that case.  */
6818      if (src)
6819	{
6820	  if (rtx_equal_p (src, dest))
6821	    src_cost = -1;
6822	  else
6823	    src_cost = COST (src);
6824	}
6825
6826      if (src_eqv_here)
6827	{
6828	  if (rtx_equal_p (src_eqv_here, dest))
6829	    src_eqv_cost = -1;
6830	  else
6831	    src_eqv_cost = COST (src_eqv_here);
6832	}
6833
6834      if (src_folded)
6835	{
6836	  if (rtx_equal_p (src_folded, dest))
6837	    src_folded_cost = -1;
6838	  else
6839	    src_folded_cost = COST (src_folded);
6840	}
6841
6842      if (src_related)
6843	{
6844	  if (rtx_equal_p (src_related, dest))
6845	    src_related_cost = -1;
6846	  else
6847	    src_related_cost = COST (src_related);
6848	}
6849
6850      /* If this was an indirect jump insn, a known label will really be
6851	 cheaper even though it looks more expensive.  */
6852      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
6853	src_folded = src_const, src_folded_cost = -1;
6854
6855      /* Loop until a replacement is made.  This must terminate since
6856         the current contents will be tested and will always be valid.  */
6857      while (1)
6858        {
6859          rtx trial, old_src;
6860
6861          /* Skip invalid entries.  */
6862          while (elt && GET_CODE (elt->exp) != REG
6863	         && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6864	    elt = elt->next_same_value;
6865
6866	  /* A paradoxical subreg would be bad here: it'll be the right
6867	     size, but later may be adjusted so that the upper bits aren't
6868	     what we want.  So reject it.  */
6869	  if (elt != 0
6870	      && GET_CODE (elt->exp) == SUBREG
6871	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
6872		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
6873	      /* It is okay, though, if the rtx we're trying to match
6874		 will ignore any of the bits we can't predict.  */
6875	      && ! (src != 0
6876		    && GET_CODE (src) == SUBREG
6877		    && GET_MODE (src) == GET_MODE (elt->exp)
6878		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6879			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
6880	    {
6881	      elt = elt->next_same_value;
6882	      continue;
6883	    }
6884
6885          if (elt) src_elt_cost = elt->cost;
6886
6887          /* Find the cheapest and skip it for the next time.  For items
6888	     of equal cost, use this order:
6889	     src_folded, src, src_eqv, src_related and hash table entry.  */
6890          if (src_folded_cost <= src_cost
6891	      && src_folded_cost <= src_eqv_cost
6892	      && src_folded_cost <= src_related_cost
6893	      && src_folded_cost <= src_elt_cost)
6894	    {
6895	      trial = src_folded, src_folded_cost = 10000;
6896	      if (src_folded_force_flag)
6897		trial = force_const_mem (mode, trial);
6898	    }
6899          else if (src_cost <= src_eqv_cost
6900	           && src_cost <= src_related_cost
6901	           && src_cost <= src_elt_cost)
6902	    trial = src, src_cost = 10000;
6903          else if (src_eqv_cost <= src_related_cost
6904	           && src_eqv_cost <= src_elt_cost)
6905	    trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
6906          else if (src_related_cost <= src_elt_cost)
6907	    trial = copy_rtx (src_related), src_related_cost = 10000;
6908          else
6909	    {
6910	      trial = copy_rtx (elt->exp);
6911	      elt = elt->next_same_value;
6912	      src_elt_cost = 10000;
6913	    }
6914
6915	  /* We don't normally have an insn matching (set (pc) (pc)), so
6916	     check for this separately here.  We will delete such an
6917	     insn below.
6918
6919	     Tablejump insns contain a USE of the table, so simply replacing
6920	     the operand with the constant won't match.  This is simply an
6921	     unconditional branch, however, and is therefore valid.  Just
6922	     insert the substitution here and we will delete and re-emit
6923	     the insn later.  */
6924
6925	  /* Keep track of the original SET_SRC so that we can fix notes
6926	     on libcall instructions.  */
6927	  old_src = SET_SRC (sets[i].rtl);
6928
6929	  if (n_sets == 1 && dest == pc_rtx
6930	      && (trial == pc_rtx
6931		  || (GET_CODE (trial) == LABEL_REF
6932		      && ! condjump_p (insn))))
6933	    {
6934	      /* If TRIAL is a label in front of a jump table, we are
6935		 really falling through the switch (this is how casesi
6936		 insns work), so we must branch around the table.  */
6937	      if (GET_CODE (trial) == CODE_LABEL
6938		  && NEXT_INSN (trial) != 0
6939		  && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
6940		  && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
6941		      || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
6942
6943		trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
6944
6945	      SET_SRC (sets[i].rtl) = trial;
6946	      cse_jumps_altered = 1;
6947	      break;
6948	    }
6949
6950	  /* Look for a substitution that makes a valid insn.  */
6951          else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
6952	    {
6953	      /* If we just made a substitution inside a libcall, then we
6954		 need to make the same substitution in any notes attached
6955		 to the RETVAL insn.  */
6956	      if (libcall_insn
6957		  && (GET_CODE (old_src) == REG
6958		      || GET_CODE (old_src) == SUBREG
6959		      ||  GET_CODE (old_src) == MEM))
6960		replace_rtx (REG_NOTES (libcall_insn), old_src,
6961			     canon_reg (SET_SRC (sets[i].rtl), insn));
6962
6963	      /* The result of apply_change_group can be ignored; see
6964		 canon_reg.  */
6965
6966	      validate_change (insn, &SET_SRC (sets[i].rtl),
6967			       canon_reg (SET_SRC (sets[i].rtl), insn),
6968			       1);
6969	      apply_change_group ();
6970	      break;
6971	    }
6972
6973	  /* If we previously found constant pool entries for
6974	     constants and this is a constant, try making a
6975	     pool entry.  Put it in src_folded unless we already have done
6976	     pool entry.  Put it in src_folded unless we have already done
6977	     this, since that is where it likely came from.  */
6978	  else if (constant_pool_entries_cost
6979		   && CONSTANT_P (trial)
6980		   && ! (GET_CODE (trial) == CONST
6981			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
6982		   && (src_folded == 0
6983		       || (GET_CODE (src_folded) != MEM
6984			   && ! src_folded_force_flag))
6985		   && GET_MODE_CLASS (mode) != MODE_CC
6986		   && mode != VOIDmode)
6987	    {
6988	      src_folded_force_flag = 1;
6989	      src_folded = trial;
6990	      src_folded_cost = constant_pool_entries_cost;
6991	    }
6992        }
6993
6994      src = SET_SRC (sets[i].rtl);
6995
6996      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
6997	 However, there is an important exception:  If both are registers
6998	 that are not the head of their equivalence class, replace SET_SRC
6999	 with the head of the class.  If we do not do this, we will have
7000	 both registers live over a portion of the basic block.  This way,
7001	 their lifetimes will likely abut instead of overlapping.  */
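      /* A sketch with invented register numbers: if (reg 101) and (reg 102)
	 belong to a class headed by (reg 100), and this insn has become
	 (set (reg 102) (reg 102)), we rewrite it as
	 (set (reg 102) (reg 100)) so the two lifetimes tend to abut.  */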
7002      if (GET_CODE (dest) == REG
7003	  && REGNO_QTY_VALID_P (REGNO (dest))
7004	  && qty_mode[reg_qty[REGNO (dest)]] == GET_MODE (dest)
7005	  && qty_first_reg[reg_qty[REGNO (dest)]] != REGNO (dest)
7006	  && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7007	  /* Don't do this if the original insn had a hard reg as
7008	     SET_SRC.  */
7009	  && (GET_CODE (sets[i].src) != REG
7010	      || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7011	/* We can't call canon_reg here because it won't do anything if
7012	   SRC is a hard register.  */
7013	{
7014	  int first = qty_first_reg[reg_qty[REGNO (src)]];
7015	  rtx new_src
7016	    = (first >= FIRST_PSEUDO_REGISTER
7017	       ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7018
7019	  /* We must use validate_change even for this, because this
7020	     might be a special no-op instruction, suitable only to
7021	     tag notes onto.  */
7022	  if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7023	    {
7024	      src = new_src;
7025	      /* If we had a constant that is cheaper than what we are now
7026		 setting SRC to, use that constant.  We ignored it when we
7027		 thought we could make this into a no-op.  */
7028	      if (src_const && COST (src_const) < COST (src)
7029		  && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7030				      0))
7031		src = src_const;
7032	    }
7033	}
7034
7035      /* If we made a change, recompute SRC values.  */
7036      if (src != sets[i].src)
7037        {
7038          do_not_record = 0;
7039          hash_arg_in_memory = 0;
7040          hash_arg_in_struct = 0;
7041	  sets[i].src = src;
7042          sets[i].src_hash = HASH (src, mode);
7043          sets[i].src_volatile = do_not_record;
7044          sets[i].src_in_memory = hash_arg_in_memory;
7045          sets[i].src_in_struct = hash_arg_in_struct;
7046          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7047        }
7048
7049      /* If this is a single SET, we are setting a register, and we have an
7050	 equivalent constant, we want to add a REG_NOTE.   We don't want
7051	 to write a REG_EQUAL note for a constant pseudo since verifying that
7052	 that pseudo hasn't been eliminated is a pain.  Such a note also
7053	 won't help anything.
7054
7055	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7056	 which can be created for a reference to a compile time computable
7057	 entry in a jump table.  */
7058
7059      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7060	  && GET_CODE (src_const) != REG
7061	  && ! (GET_CODE (src_const) == CONST
7062		&& GET_CODE (XEXP (src_const, 0)) == MINUS
7063		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7064		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7065	{
7066	  tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7067
7068	  /* Record the actual constant value in a REG_EQUAL note, making
7069	     a new one if one does not already exist.  */
7070	  if (tem)
7071	    XEXP (tem, 0) = src_const;
7072	  else
7073	    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7074						  src_const, REG_NOTES (insn));
7075
7076          /* If storing a constant value in a register that
7077	     previously held the constant value 0,
7078	     record this fact with a REG_WAS_0 note on this insn.
7079
7080	     Note that the *register* is required to have previously held 0,
7081	     not just any register in the quantity, and we must point to the
7082	     insn that set that register to zero.
7083
7084	     Rather than track each register individually, we just see if
7085	     the last set for this quantity was for this register.  */
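	  /* For instance (insn name invented): if insn I1 did
	     (set (reg 103) (const_int 0)) and this insn now sets (reg 103)
	     to (const_int 9), we attach a REG_WAS_0 note pointing at I1.  */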
7086
7087	  if (REGNO_QTY_VALID_P (REGNO (dest))
7088	      && qty_const[reg_qty[REGNO (dest)]] == const0_rtx)
7089	    {
7090	      /* See if we previously had a REG_WAS_0 note.  */
7091	      rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7092	      rtx const_insn = qty_const_insn[reg_qty[REGNO (dest)]];
7093
7094	      if ((tem = single_set (const_insn)) != 0
7095		  && rtx_equal_p (SET_DEST (tem), dest))
7096		{
7097		  if (note)
7098		    XEXP (note, 0) = const_insn;
7099		  else
7100		    REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7101							  const_insn,
7102							  REG_NOTES (insn));
7103		}
7104	    }
7105	}
7106
7107      /* Now deal with the destination.  */
7108      do_not_record = 0;
7109      sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
7110
7111      /* Look through any SIGN_EXTRACT, ZERO_EXTRACT, SUBREG or
7112	 STRICT_LOW_PART to find the MEM or REG within it.  */
7113      while (GET_CODE (dest) == SIGN_EXTRACT
7114	     || GET_CODE (dest) == ZERO_EXTRACT
7115	     || GET_CODE (dest) == SUBREG
7116	     || GET_CODE (dest) == STRICT_LOW_PART)
7117	{
7118	  sets[i].inner_dest_loc = &XEXP (dest, 0);
7119	  dest = XEXP (dest, 0);
7120	}
7121
7122      sets[i].inner_dest = dest;
7123
7124      if (GET_CODE (dest) == MEM)
7125	{
7126#ifdef PUSH_ROUNDING
7127	  /* Stack pushes invalidate the stack pointer.  */
7128	  rtx addr = XEXP (dest, 0);
7129	  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7130	       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7131	      && XEXP (addr, 0) == stack_pointer_rtx)
7132	    invalidate (stack_pointer_rtx, Pmode);
7133#endif
7134	  dest = fold_rtx (dest, insn);
7135	}
7136
7137      /* Compute the hash code of the destination now,
7138	 before the effects of this instruction are recorded,
7139	 since the register values used in the address computation
7140	 are those before this instruction.  */
7141      sets[i].dest_hash = HASH (dest, mode);
7142
7143      /* Don't enter a bit-field in the hash table
7144	 because the value in it after the store
7145	 may not equal what was stored, due to truncation.  */
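      /* E.g. (field width and values invented): storing (const_int 3) into
	 a 4-bit ZERO_EXTRACT can be recorded, since 3 fits in four bits;
	 storing (const_int 19) cannot, since the field would truncate it
	 to 3.  */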
7146
7147      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7148	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7149	{
7150	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7151
7152	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7153	      && GET_CODE (width) == CONST_INT
7154	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7155	      && ! (INTVAL (src_const)
7156		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7157	    /* Exception: if the value is constant,
7158	       and it won't be truncated, record it.  */
7159	    ;
7160	  else
7161	    {
7162	      /* This is chosen so that the destination will be invalidated
7163		 but no new value will be recorded.
7164		 We must invalidate because sometimes constant
7165		 values can be recorded for bitfields.  */
7166	      sets[i].src_elt = 0;
7167	      sets[i].src_volatile = 1;
7168	      src_eqv = 0;
7169	      src_eqv_elt = 0;
7170	    }
7171	}
7172
7173      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7174	 the insn.  */
7175      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7176	{
7177	  PUT_CODE (insn, NOTE);
7178	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7179	  NOTE_SOURCE_FILE (insn) = 0;
7180	  cse_jumps_altered = 1;
7181	  /* One less use of the label this insn used to jump to.  */
7182	  if (JUMP_LABEL (insn) != 0)
7183	    --LABEL_NUSES (JUMP_LABEL (insn));
7184	  /* No more processing for this set.  */
7185	  sets[i].rtl = 0;
7186	}
7187
7188      /* If this SET is now setting PC to a label, we know it used to
7189	 be a conditional or computed branch.  So we see if we can follow
7190	 it.  If it was a computed branch, delete it and re-emit.  */
7191      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7192	{
7193	  rtx p;
7194
7195	  /* If this is not in the format for a simple branch and
7196	     it is the only SET in the insn, re-emit it.  */
7197	  if (! simplejump_p (insn) && n_sets == 1)
7198	    {
7199	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7200	      JUMP_LABEL (new) = XEXP (src, 0);
7201	      LABEL_NUSES (XEXP (src, 0))++;
7202	      delete_insn (insn);
7203	      insn = new;
7204	    }
7205	  else
7206	    /* Otherwise, force rerecognition, since it probably had
7207	       a different pattern before.
7208	       This shouldn't really be necessary, since whatever
7209	       changed the source value above should have done this.
7210	       Until the right place is found, might as well do this here.  */
7211	    INSN_CODE (insn) = -1;
7212
7213	  /* Now that we've converted this jump to an unconditional jump,
7214	     there is dead code after it.  Delete the dead code until we
7215	     reach a BARRIER, the end of the function, or a label.  Do
7216	     not delete NOTEs except for NOTE_INSN_DELETED since later
7217	     phases assume these notes are retained.  */
7218
7219	  p = insn;
7220
7221	  while (NEXT_INSN (p) != 0
7222		 && GET_CODE (NEXT_INSN (p)) != BARRIER
7223		 && GET_CODE (NEXT_INSN (p)) != CODE_LABEL)
7224	    {
7225	      if (GET_CODE (NEXT_INSN (p)) != NOTE
7226		  || NOTE_LINE_NUMBER (NEXT_INSN (p)) == NOTE_INSN_DELETED)
7227		delete_insn (NEXT_INSN (p));
7228	      else
7229		p = NEXT_INSN (p);
7230	    }
7231
7232	  /* If we don't have a BARRIER immediately after INSN, put one there.
7233	     Much code assumes that there are no NOTEs between a JUMP_INSN and
7234	     BARRIER.  */
7235
7236	  if (NEXT_INSN (insn) == 0
7237	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
7238	    emit_barrier_before (NEXT_INSN (insn));
7239
7240	  /* We might have two BARRIERs separated by notes.  Delete the second
7241	     one if so.  */
7242
7243	  if (p != insn && NEXT_INSN (p) != 0
7244	      && GET_CODE (NEXT_INSN (p)) == BARRIER)
7245	    delete_insn (NEXT_INSN (p));
7246
7247	  cse_jumps_altered = 1;
7248	  sets[i].rtl = 0;
7249	}
7250
7251      /* If destination is volatile, invalidate it and then do no further
7252	 processing for this assignment.  */
7253
7254      else if (do_not_record)
7255	{
7256	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7257	      || GET_CODE (dest) == MEM)
7258	    invalidate (dest, VOIDmode);
7259	  else if (GET_CODE (dest) == STRICT_LOW_PART
7260		   || GET_CODE (dest) == ZERO_EXTRACT)
7261	    invalidate (XEXP (dest, 0), GET_MODE (dest));
7262	  sets[i].rtl = 0;
7263	}
7264
7265      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7266	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7267
7268#ifdef HAVE_cc0
7269      /* If setting CC0, record what it was set to, or a constant, if it
7270	 is equivalent to a constant.  If it is being set to a floating-point
7271	 value, make a COMPARE with the appropriate constant of 0.  If we
7272	 don't do this, later code can interpret this as a test against
7273	 const0_rtx, which can cause problems if we try to put it into an
7274	 insn as a floating-point operand.  */
7275      if (dest == cc0_rtx)
7276	{
7277	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7278	  this_insn_cc0_mode = mode;
7279	  if (FLOAT_MODE_P (mode))
7280	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7281					     CONST0_RTX (mode));
7282	}
7283#endif
7284    }
7285
7286  /* Now enter all non-volatile source expressions in the hash table
7287     if they are not already present.
7288     Record their equivalence classes in src_elt.
7289     This way we can insert the corresponding destinations into
7290     the same classes even if the actual sources are no longer in them
7291     (having been invalidated).  */
7292
7293  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7294      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7295    {
7296      register struct table_elt *elt;
7297      register struct table_elt *classp = sets[0].src_elt;
7298      rtx dest = SET_DEST (sets[0].rtl);
7299      enum machine_mode eqvmode = GET_MODE (dest);
7300
7301      if (GET_CODE (dest) == STRICT_LOW_PART)
7302	{
7303	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7304	  classp = 0;
7305	}
7306      if (insert_regs (src_eqv, classp, 0))
7307	{
7308	  rehash_using_reg (src_eqv);
7309	  src_eqv_hash = HASH (src_eqv, eqvmode);
7310	}
7311      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7312      elt->in_memory = src_eqv_in_memory;
7313      elt->in_struct = src_eqv_in_struct;
7314      src_eqv_elt = elt;
7315
7316      /* Check to see if src_eqv_elt is the same as a set source which
7317	 does not yet have an elt, and if so set the elt of the set source
7318	 to src_eqv_elt.  */
7319      for (i = 0; i < n_sets; i++)
7320	if (sets[i].rtl && sets[i].src_elt == 0
7321	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7322	  sets[i].src_elt = src_eqv_elt;
7323    }
7324
7325  for (i = 0; i < n_sets; i++)
7326    if (sets[i].rtl && ! sets[i].src_volatile
7327	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7328      {
7329	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7330	  {
7331	    /* REG_EQUAL in setting a STRICT_LOW_PART
7332	       gives an equivalent for the entire destination register,
7333	       not just for the subreg being stored in now.
7334	       This is a more interesting equivalence, so we arrange later
7335	       to treat the entire reg as the destination.  */
7336	    sets[i].src_elt = src_eqv_elt;
7337	    sets[i].src_hash = src_eqv_hash;
7338	  }
7339	else
7340	  {
7341	    /* Insert source and constant equivalent into hash table, if not
7342	       already present.  */
7343	    register struct table_elt *classp = src_eqv_elt;
7344	    register rtx src = sets[i].src;
7345	    register rtx dest = SET_DEST (sets[i].rtl);
7346	    enum machine_mode mode
7347	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7348
7349	    if (sets[i].src_elt == 0)
7350	      {
7351		register struct table_elt *elt;
7352
7353		/* Note that these insert_regs calls cannot remove
7354		   any of the src_elt's, because they would have failed to
7355		   match if not still valid.  */
7356		if (insert_regs (src, classp, 0))
7357		  {
7358		    rehash_using_reg (src);
7359		    sets[i].src_hash = HASH (src, mode);
7360		  }
7361		elt = insert (src, classp, sets[i].src_hash, mode);
7362		elt->in_memory = sets[i].src_in_memory;
7363		elt->in_struct = sets[i].src_in_struct;
7364		sets[i].src_elt = classp = elt;
7365	      }
7366
7367	    if (sets[i].src_const && sets[i].src_const_elt == 0
7368		&& src != sets[i].src_const
7369		&& ! rtx_equal_p (sets[i].src_const, src))
7370	      sets[i].src_elt = insert (sets[i].src_const, classp,
7371					sets[i].src_const_hash, mode);
7372	  }
7373      }
7374    else if (sets[i].src_elt == 0)
7375      /* If we did not insert the source into the hash table (e.g., it was
7376	 volatile), note the equivalence class for the REG_EQUAL value, if any,
7377	 so that the destination goes into that class.  */
7378      sets[i].src_elt = src_eqv_elt;
7379
7380  invalidate_from_clobbers (x);
7381
7382  /* Some registers are invalidated by subroutine calls.  Memory is
7383     invalidated by non-constant calls.  */
7384
7385  if (GET_CODE (insn) == CALL_INSN)
7386    {
7387      if (! CONST_CALL_P (insn))
7388	invalidate_memory ();
7389      invalidate_for_call ();
7390    }
7391
7392  /* Now invalidate everything set by this instruction.
7393     If a SUBREG or other funny destination is being set,
7394     sets[i].rtl is still nonzero, so here we invalidate the reg
7395     a part of which is being set.  */
7396
7397  for (i = 0; i < n_sets; i++)
7398    if (sets[i].rtl)
7399      {
7400	/* We can't use the inner dest, because the mode associated with
7401	   a ZERO_EXTRACT is significant.  */
7402	register rtx dest = SET_DEST (sets[i].rtl);
7403
7404	/* Needed for registers to remove the register from its
7405	   previous quantity's chain.
7406	   Needed for memory if this is a nonvarying address, unless
7407	   we have just done an invalidate_memory that covers even those.  */
7408	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7409	    || GET_CODE (dest) == MEM)
7410	  invalidate (dest, VOIDmode);
7411	else if (GET_CODE (dest) == STRICT_LOW_PART
7412		 || GET_CODE (dest) == ZERO_EXTRACT)
7413	  invalidate (XEXP (dest, 0), GET_MODE (dest));
7414      }
7415
7416  /* Make sure registers mentioned in destinations
7417     are safe for use in an expression to be inserted.
7418     This removes from the hash table
7419     any invalid entry that refers to one of these registers.
7420
7421     We don't care about the return value from mention_regs because
7422     we are going to hash the SET_DEST values unconditionally.  */
7423
7424  for (i = 0; i < n_sets; i++)
7425    if (sets[i].rtl && GET_CODE (SET_DEST (sets[i].rtl)) != REG)
7426      mention_regs (SET_DEST (sets[i].rtl));
7427
7428  /* We may have just removed some of the src_elt's from the hash table.
7429     So replace each one with the current head of the same class.  */
7430
7431  for (i = 0; i < n_sets; i++)
7432    if (sets[i].rtl)
7433      {
7434	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7435	  /* If elt was removed, find current head of same class,
7436	     or 0 if nothing remains of that class.  */
7437	  {
7438	    register struct table_elt *elt = sets[i].src_elt;
7439
7440	    while (elt && elt->prev_same_value)
7441	      elt = elt->prev_same_value;
7442
7443	    while (elt && elt->first_same_value == 0)
7444	      elt = elt->next_same_value;
7445	    sets[i].src_elt = elt ? elt->first_same_value : 0;
7446	  }
7447      }
7448
7449  /* Now insert the destinations into their equivalence classes.  */
7450
7451  for (i = 0; i < n_sets; i++)
7452    if (sets[i].rtl)
7453      {
7454	register rtx dest = SET_DEST (sets[i].rtl);
7455	rtx inner_dest = sets[i].inner_dest;
7456	register struct table_elt *elt;
7457
7458	/* Don't record value if we are not supposed to risk allocating
7459	   floating-point values in registers that might be wider than
7460	   memory.  */
7461	if ((flag_float_store
7462	     && GET_CODE (dest) == MEM
7463	     && FLOAT_MODE_P (GET_MODE (dest)))
7464	    /* Don't record BLKmode values, because we don't know the
7465	       size of it, and can't be sure that other BLKmode values
7466	       have the same or smaller size.  */
7467	    || GET_MODE (dest) == BLKmode
7468	    /* Don't record values of destinations set inside a libcall block
7469	       since we might delete the libcall.  Things should have been set
7470	       up so we won't want to reuse such a value, but we play it safe
7471	       here.  */
7472	    || libcall_insn
7473	    /* If we didn't put a REG_EQUAL value or a source into the hash
7474	       table, there is no point in recording DEST.  */
7475	    || sets[i].src_elt == 0
7476	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7477	       or SIGN_EXTEND, don't record DEST since it can cause
7478	       some tracking to be wrong.
7479
7480	       ??? Think about this more later.  */
7481	    || (GET_CODE (dest) == SUBREG
7482		&& (GET_MODE_SIZE (GET_MODE (dest))
7483		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7484		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
7485		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7486	  continue;
7487
7488	/* STRICT_LOW_PART isn't part of the value BEING set,
7489	   and neither is the SUBREG inside it.
7490	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
7491	if (GET_CODE (dest) == STRICT_LOW_PART)
7492	  dest = SUBREG_REG (XEXP (dest, 0));
7493
7494	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7495	  /* Registers must also be inserted into chains for quantities.  */
7496	  if (insert_regs (dest, sets[i].src_elt, 1))
7497	    {
7498	      /* If `insert_regs' changes something, the hash code must be
7499		 recalculated.  */
7500	      rehash_using_reg (dest);
7501	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7502	    }
7503
7504	if (GET_CODE (inner_dest) == MEM
7505	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7506	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7507	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
7508	     Consider the case in which the address of the MEM is
7509	     passed to a function, which alters the MEM.  Then, if we
7510	     later use Y instead of the MEM we'll miss the update.  */
7511	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7512	else
7513	  elt = insert (dest, sets[i].src_elt,
7514			sets[i].dest_hash, GET_MODE (dest));
7515
7516	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7517			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7518			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7519							  0))));
7520
7521	if (elt->in_memory)
7522	  {
7523	    /* This implicitly assumes a whole struct
7524	       need not have MEM_IN_STRUCT_P.
7525	       But a whole struct is *supposed* to have MEM_IN_STRUCT_P.  */
7526	    elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7527			      || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7528	  }
7529
7530	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7531	   narrower than M2, and both M1 and M2 are the same number of words,
7532	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7533	   make that equivalence as well.
7534
7535	   However, BAR may have equivalences for which gen_lowpart_if_possible
7536	   will produce a simpler value than gen_lowpart_if_possible applied to
7537	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7538	   BAR's equivalences.  If we don't get a simplified form, make
7539	   the SUBREG.  It will not be used in an equivalence, but will
7540	   cause two similar assignments to be detected.
7541
7542	   Note the loop below will find SUBREG_REG (DEST) since we have
7543	   already entered SRC and DEST of the SET in the table.  */
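	/* A hypothetical instance: given (set (subreg:SI (reg:HI 104) 0) Y)
	   on a 32-bit-word target, both modes occupy one word, so we also
	   record (reg:HI 104) as equivalent to an HImode form of Y, or of
	   one of Y's equivalents when gen_lowpart_if_possible yields a
	   simpler expression for it.  */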
7544
7545	if (GET_CODE (dest) == SUBREG
7546	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7547		 / UNITS_PER_WORD)
7548		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7549	    && (GET_MODE_SIZE (GET_MODE (dest))
7550		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7551	    && sets[i].src_elt != 0)
7552	  {
7553	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7554	    struct table_elt *elt, *classp = 0;
7555
7556	    for (elt = sets[i].src_elt->first_same_value; elt;
7557		 elt = elt->next_same_value)
7558	      {
7559		rtx new_src = 0;
7560		unsigned src_hash;
7561		struct table_elt *src_elt;
7562
7563		/* Ignore invalid entries.  */
7564		if (GET_CODE (elt->exp) != REG
7565		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7566		  continue;
7567
7568		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7569		if (new_src == 0)
7570		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7571
7572		src_hash = HASH (new_src, new_mode);
7573		src_elt = lookup (new_src, src_hash, new_mode);
7574
7575		/* Put the new source in the hash table if it isn't
7576		   there already.  */
7577		if (src_elt == 0)
7578		  {
7579		    if (insert_regs (new_src, classp, 0))
7580		      {
7581			rehash_using_reg (new_src);
7582			src_hash = HASH (new_src, new_mode);
7583		      }
7584		    src_elt = insert (new_src, classp, src_hash, new_mode);
7585		    src_elt->in_memory = elt->in_memory;
7586		    src_elt->in_struct = elt->in_struct;
7587		  }
7588		else if (classp && classp != src_elt->first_same_value)
7589		  /* Show that two things that we've seen before are
7590		     actually the same.  */
7591		  merge_equiv_classes (src_elt, classp);
7592
7593		classp = src_elt->first_same_value;
7594		/* Ignore invalid entries.  */
7595		while (classp
7596		       && GET_CODE (classp->exp) != REG
7597		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7598		  classp = classp->next_same_value;
7599	      }
7600	  }
7601      }
7602
7603  /* Special handling for (set REG0 REG1)
7604     where REG0 is the "cheapest", cheaper than REG1.
7605     After cse, REG1 will probably not be used in the sequel,
7606     so (if easily done) change this insn to (set REG1 REG0) and
7607     replace REG1 with REG0 in the previous insn that computed their value.
7608     Then REG1 will become a dead store and won't cloud the situation
7609     for later optimizations.
7610
7611     Do not make this change if REG1 is a hard register, because it will
7612     then be used in the sequel and we may be changing a two-operand insn
7613     into a three-operand insn.
7614
7615     Also do not do this if we are operating on a copy of INSN.  */
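  /* A sketch of the transformation, with invented register numbers:

	(set (reg 105) (plus (reg 99) (const_int 4)))
	(set (reg 100) (reg 105))	; (reg 100) is the cheapest

     becomes

	(set (reg 100) (plus (reg 99) (const_int 4)))
	(set (reg 105) (reg 100))

     leaving (reg 105) as a probable dead store.  */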
7616
7617  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7618      && NEXT_INSN (PREV_INSN (insn)) == insn
7619      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7620      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7621      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7622      && (qty_first_reg[reg_qty[REGNO (SET_SRC (sets[0].rtl))]]
7623	  == REGNO (SET_DEST (sets[0].rtl))))
7624    {
7625      rtx prev = PREV_INSN (insn);
7626      while (prev && GET_CODE (prev) == NOTE)
7627	prev = PREV_INSN (prev);
7628
7629      if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7630	  && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7631	{
7632	  rtx dest = SET_DEST (sets[0].rtl);
7633	  rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7634
7635	  validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7636	  validate_change (insn, & SET_DEST (sets[0].rtl),
7637			   SET_SRC (sets[0].rtl), 1);
7638	  validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7639	  apply_change_group ();
7640
7641	  /* If REG1 was equivalent to a constant, REG0 is not.  */
7642	  if (note)
7643	    PUT_REG_NOTE_KIND (note, REG_EQUAL);
7644
7645	  /* If there was a REG_WAS_0 note on PREV, remove it.  Move
7646	     any REG_WAS_0 note on INSN to PREV.  */
7647	  note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7648	  if (note)
7649	    remove_note (prev, note);
7650
7651	  note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7652	  if (note)
7653	    {
7654	      remove_note (insn, note);
7655	      XEXP (note, 1) = REG_NOTES (prev);
7656	      REG_NOTES (prev) = note;
7657	    }
7658
7659	  /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7660	     then we must delete it, because the value in REG0 has changed.  */
7661	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7662	  if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7663	    remove_note (insn, note);
7664	}
7665    }
7666
7667  /* If this is a conditional jump insn, record any known equivalences due to
7668     the condition being tested.  */
7669
7670  last_jump_equiv_class = 0;
7671  if (GET_CODE (insn) == JUMP_INSN
7672      && n_sets == 1 && GET_CODE (x) == SET
7673      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7674    record_jump_equiv (insn, 0);
7675
7676#ifdef HAVE_cc0
7677  /* If the previous insn set CC0 and this insn no longer references CC0,
7678     delete the previous insn.  Here we use the fact that nothing expects CC0
7679     to be valid over an insn, which is true until the final pass.  */
7680  if (prev_insn && GET_CODE (prev_insn) == INSN
7681      && (tem = single_set (prev_insn)) != 0
7682      && SET_DEST (tem) == cc0_rtx
7683      && ! reg_mentioned_p (cc0_rtx, x))
7684    {
7685      PUT_CODE (prev_insn, NOTE);
7686      NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7687      NOTE_SOURCE_FILE (prev_insn) = 0;
7688    }
7689
7690  prev_insn_cc0 = this_insn_cc0;
7691  prev_insn_cc0_mode = this_insn_cc0_mode;
7692#endif
7693
7694  prev_insn = insn;
7695}
7696
7697/* Remove from the hash table all expressions that reference memory.  */
7698static void
7699invalidate_memory ()
7700{
7701  register int i;
7702  register struct table_elt *p, *next;
7703
7704  for (i = 0; i < NBUCKETS; i++)
7705    for (p = table[i]; p; p = next)
7706      {
7707	next = p->next_same_hash;
7708	if (p->in_memory)
7709	  remove_from_table (p, i);
7710      }
7711}
7712
7713/* XXX ??? The name of this function bears little resemblance to
7714   what this function actually does.  FIXME.  */
7715static int
7716note_mem_written (addr)
7717     register rtx addr;
7718{
7719  /* Pushing or popping the stack invalidates just the stack pointer.  */
7720  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7721       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7722      && GET_CODE (XEXP (addr, 0)) == REG
7723      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
7724    {
7725      if (reg_tick[STACK_POINTER_REGNUM] >= 0)
7726	reg_tick[STACK_POINTER_REGNUM]++;
7727
7728      /* This should be *very* rare.  */
7729      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
7730	invalidate (stack_pointer_rtx, VOIDmode);
7731      return 1;
7732    }
7733  return 0;
7734}
7735
7736/* Perform invalidation on the basis of everything about an insn
7737   except for invalidating the actual places that are SET in it.
7738   This includes the places CLOBBERed, and anything that might
7739   alias with something that is SET or CLOBBERed.
7740
7741   X is the pattern of the insn.  */
7742
7743static void
7744invalidate_from_clobbers (x)
7745     rtx x;
7746{
7747  if (GET_CODE (x) == CLOBBER)
7748    {
7749      rtx ref = XEXP (x, 0);
7750      if (ref)
7751	{
7752	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7753	      || GET_CODE (ref) == MEM)
7754	    invalidate (ref, VOIDmode);
7755	  else if (GET_CODE (ref) == STRICT_LOW_PART
7756		   || GET_CODE (ref) == ZERO_EXTRACT)
7757	    invalidate (XEXP (ref, 0), GET_MODE (ref));
7758	}
7759    }
7760  else if (GET_CODE (x) == PARALLEL)
7761    {
7762      register int i;
7763      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
7764	{
7765	  register rtx y = XVECEXP (x, 0, i);
7766	  if (GET_CODE (y) == CLOBBER)
7767	    {
7768	      rtx ref = XEXP (y, 0);
7769	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
7770		  || GET_CODE (ref) == MEM)
7771		invalidate (ref, VOIDmode);
7772	      else if (GET_CODE (ref) == STRICT_LOW_PART
7773		       || GET_CODE (ref) == ZERO_EXTRACT)
7774		invalidate (XEXP (ref, 0), GET_MODE (ref));
7775	    }
7776	}
7777    }
7778}
7779
7780/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
7781   and replace any registers in them with either an equivalent constant
7782   or the canonical form of the register.  If we are inside an address,
7783   only do this if the address remains valid.
7784
7785   OBJECT is 0 except when within a MEM in which case it is the MEM.
7786
7787   Return the replacement for X.  */
7788
7789static rtx
7790cse_process_notes (x, object)
7791     rtx x;
7792     rtx object;
7793{
7794  enum rtx_code code = GET_CODE (x);
7795  char *fmt = GET_RTX_FORMAT (code);
7796  int i;
7797
7798  switch (code)
7799    {
7800    case CONST_INT:
7801    case CONST:
7802    case SYMBOL_REF:
7803    case LABEL_REF:
7804    case CONST_DOUBLE:
7805    case PC:
7806    case CC0:
7807    case LO_SUM:
7808      return x;
7809
7810    case MEM:
7811      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
7812      return x;
7813
7814    case EXPR_LIST:
7815    case INSN_LIST:
7816      if (REG_NOTE_KIND (x) == REG_EQUAL)
7817	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
7818      if (XEXP (x, 1))
7819	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
7820      return x;
7821
7822    case SIGN_EXTEND:
7823    case ZERO_EXTEND:
7824    case SUBREG:
7825      {
7826	rtx new = cse_process_notes (XEXP (x, 0), object);
7827	/* We don't substitute VOIDmode constants into these rtx,
7828	   since they would impede folding.  */
7829	if (GET_MODE (new) != VOIDmode)
7830	  validate_change (object, &XEXP (x, 0), new, 0);
7831	return x;
7832      }
7833
7834    case REG:
7835      i = reg_qty[REGNO (x)];
7836
7837      /* Return a constant or a constant register.  */
7838      if (REGNO_QTY_VALID_P (REGNO (x))
7839	  && qty_const[i] != 0
7840	  && (CONSTANT_P (qty_const[i])
7841	      || GET_CODE (qty_const[i]) == REG))
7842	{
7843	  rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
7844	  if (new)
7845	    return new;
7846	}
7847
7848      /* Otherwise, canonicalize this register.  */
7849      return canon_reg (x, NULL_RTX);
7850
7851    default:
7852      break;
7853    }
7854
7855  for (i = 0; i < GET_RTX_LENGTH (code); i++)
7856    if (fmt[i] == 'e')
7857      validate_change (object, &XEXP (x, i),
7858		       cse_process_notes (XEXP (x, i), object), 0);
7859
7860  return x;
7861}
7862
7863/* Find common subexpressions between the end test of a loop and the beginning
7864   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
7865
7866   Often we have a loop where an expression in the exit test is used
7867   in the body of the loop.  For example "while (*p) *q++ = *p++;".
7868   Because of the way we duplicate the loop exit test in front of the loop,
7869   however, we don't detect that common subexpression.  This will be caught
7870   when global cse is implemented, but this is a quite common case.
7871
7872   This function handles the most common cases of these common expressions.
7873   It is called after we have processed the basic block ending with the
7874   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
7875   jumps to a label used only once.  */
7876
7877static void
7878cse_around_loop (loop_start)
7879     rtx loop_start;
7880{
7881  rtx insn;
7882  int i;
7883  struct table_elt *p;
7884
7885  /* If the jump at the end of the loop doesn't go to the start, we don't
7886     do anything.  */
7887  for (insn = PREV_INSN (loop_start);
7888       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
7889       insn = PREV_INSN (insn))
7890    ;
7891
7892  if (insn == 0
7893      || GET_CODE (insn) != NOTE
7894      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
7895    return;
7896
7897  /* If the last insn of the loop (the end test) was an NE comparison,
7898     we will interpret it as an EQ comparison, since we fell through
7899     the loop.  Any equivalences resulting from that comparison are
7900     therefore not valid and must be invalidated.  */
7901  if (last_jump_equiv_class)
7902    for (p = last_jump_equiv_class->first_same_value; p;
7903	 p = p->next_same_value)
7904      {
7905        if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
7906	    || (GET_CODE (p->exp) == SUBREG
7907	        && GET_CODE (SUBREG_REG (p->exp)) == REG))
7908	  invalidate (p->exp, VOIDmode);
7909        else if (GET_CODE (p->exp) == STRICT_LOW_PART
7910	         || GET_CODE (p->exp) == ZERO_EXTRACT)
7911	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
7912      }
7913
7914  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
7915     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
7916
7917     The only thing we do with SET_DEST is invalidate entries, so we
7918     can safely process each SET in order.  It is slightly less efficient
7919     to do so, but we only want to handle the most common cases.
7920
7921     The gen_move_insn call in cse_set_around_loop may create new pseudos.
7922     These pseudos won't have valid entries in any of the tables indexed
7923     by register number, such as reg_qty.  We avoid out-of-range array
7924     accesses by not processing any instructions created after cse started.  */
7925
7926  for (insn = NEXT_INSN (loop_start);
7927       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
7928       && INSN_UID (insn) < max_insn_uid
7929       && ! (GET_CODE (insn) == NOTE
7930	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
7931       insn = NEXT_INSN (insn))
7932    {
7933      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7934	  && (GET_CODE (PATTERN (insn)) == SET
7935	      || GET_CODE (PATTERN (insn)) == CLOBBER))
7936	cse_set_around_loop (PATTERN (insn), insn, loop_start);
7937      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
7938	       && GET_CODE (PATTERN (insn)) == PARALLEL)
7939	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7940	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
7941	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
7942	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
7943				 loop_start);
7944    }
7945}
7946
7947/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
7948   since they are done elsewhere.  This function is called via note_stores.  */
7949
7950static void
7951invalidate_skipped_set (dest, set)
7952     rtx dest;
7953     rtx set;
7954{
7955  enum rtx_code code = GET_CODE (dest);
7956
7957  if (code == MEM
7958      && ! note_mem_written (dest)	/* If this is not a stack push ... */
7959      /* There are times when an address can appear varying and be a PLUS
7960	 during this scan when it would be a fixed address were we to know
7961	 the proper equivalences.  So invalidate all memory if there is
7962	 a BLKmode or nonscalar memory reference or a reference to a
7963	 variable address.  */
7964      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
7965	  || cse_rtx_varies_p (XEXP (dest, 0))))
7966    {
7967      invalidate_memory ();
7968      return;
7969    }
7970
7971  if (GET_CODE (set) == CLOBBER
7972#ifdef HAVE_cc0
7973      || dest == cc0_rtx
7974#endif
7975      || dest == pc_rtx)
7976    return;
7977
7978  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
7979    invalidate (XEXP (dest, 0), GET_MODE (dest));
7980  else if (code == REG || code == SUBREG || code == MEM)
7981    invalidate (dest, VOIDmode);
7982}
7983
7984/* Invalidate all insns from START up to the end of the function or the
7985   next label.  This is called when we wish to CSE around a block that is
7986   conditionally executed.  */
7987
7988static void
7989invalidate_skipped_block (start)
7990     rtx start;
7991{
7992  rtx insn;
7993
7994  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
7995       insn = NEXT_INSN (insn))
7996    {
7997      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7998	continue;
7999
8000      if (GET_CODE (insn) == CALL_INSN)
8001	{
8002	  if (! CONST_CALL_P (insn))
8003	    invalidate_memory ();
8004	  invalidate_for_call ();
8005	}
8006
8007      invalidate_from_clobbers (PATTERN (insn));
8008      note_stores (PATTERN (insn), invalidate_skipped_set);
8009    }
8010}
8011
8012/* Used for communication between the following two routines; contains a
8013   value to be checked for modification.  */
8014
8015static rtx cse_check_loop_start_value;
8016
8017/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8018   indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0.  */
8019
8020static void
8021cse_check_loop_start (x, set)
8022     rtx x;
8023     rtx set ATTRIBUTE_UNUSED;
8024{
8025  if (cse_check_loop_start_value == 0
8026      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8027    return;
8028
8029  if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8030      || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8031    cse_check_loop_start_value = 0;
8032}
8033
8034/* X is a SET or CLOBBER contained in INSN that was found near the start of
8035   a loop that starts with the label at LOOP_START.
8036
8037   If X is a SET, we see if its SET_SRC is currently in our hash table.
8038   If so, we see if it has a value equal to some register used only in the
8039   loop exit code (as marked by jump.c).
8040
8041   If those two conditions are true, we search backwards from the start of
8042   the loop to see if that same value was loaded into a register that still
8043   retains its value at the start of the loop.
8044
8045   If so, we insert an insn after the load to copy the destination of that
8046   load into the equivalent register and (try to) replace our SET_SRC with that
8047   register.
8048
8049   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
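/* An illustrative scenario (register numbers invented): the loop exit test
   uses only (reg 106); inside the loop, SET_SRC (x) computes the same value
   that an insn P before the loop loaded into (reg 107).  If nothing between
   P and the loop start clobbers that value, we emit
   (set (reg 106) (reg 107)) after P and replace SET_SRC (x) with (reg 106).  */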
8050
8051static void
8052cse_set_around_loop (x, insn, loop_start)
8053     rtx x;
8054     rtx insn;
8055     rtx loop_start;
8056{
8057  struct table_elt *src_elt;
8058
8059  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8060     are setting PC or CC0 or whose SET_SRC is already a register.  */
8061  if (GET_CODE (x) == SET
8062      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8063      && GET_CODE (SET_SRC (x)) != REG)
8064    {
8065      src_elt = lookup (SET_SRC (x),
8066			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8067			GET_MODE (SET_DEST (x)));
8068
8069      if (src_elt)
8070	for (src_elt = src_elt->first_same_value; src_elt;
8071	     src_elt = src_elt->next_same_value)
8072	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8073	      && COST (src_elt->exp) < COST (SET_SRC (x)))
8074	    {
8075	      rtx p, set;
8076
8077	      /* Look for an insn in front of LOOP_START that sets
8078		 something in the desired mode to SET_SRC (x) before we hit
8079		 a label or CALL_INSN.  */
8080
8081	      for (p = prev_nonnote_insn (loop_start);
8082		   p && GET_CODE (p) != CALL_INSN
8083		   && GET_CODE (p) != CODE_LABEL;
8084		   p = prev_nonnote_insn (p))
8085		if ((set = single_set (p)) != 0
8086		    && GET_CODE (SET_DEST (set)) == REG
8087		    && GET_MODE (SET_DEST (set)) == src_elt->mode
8088		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8089		  {
8090		    /* We now have to ensure that nothing between P
8091		       and LOOP_START modified anything referenced in
8092		       SET_SRC (x).  We know that nothing within the loop
8093		       can modify it, or we would have invalidated it in
8094		       the hash table.  */
8095		    rtx q;
8096
8097		    cse_check_loop_start_value = SET_SRC (x);
8098		    for (q = p; q != loop_start; q = NEXT_INSN (q))
8099		      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8100			note_stores (PATTERN (q), cse_check_loop_start);
8101
8102		    /* If nothing was changed and we can replace our
8103		       SET_SRC, add an insn after P to copy its destination
8104		       to what we will be replacing SET_SRC with.  */
8105		    if (cse_check_loop_start_value
8106			&& validate_change (insn, &SET_SRC (x),
8107					    src_elt->exp, 0))
8108		      {
8109			/* If this creates new pseudos, this is unsafe,
8110			   because the regno of a new pseudo is unsuitable
8111			   for indexing into reg_qty when cse_insn processes
8112			   the new insn.  Therefore, if a new pseudo was
8113			   created, discard this optimization.  */
8114			int nregs = max_reg_num ();
8115			rtx move
8116			  = gen_move_insn (src_elt->exp, SET_DEST (set));
8117			if (nregs != max_reg_num ())
8118			  {
8119			    if (! validate_change (insn, &SET_SRC (x),
8120						   SET_SRC (set), 0))
8121			      abort ();
8122			  }
8123			else
8124			  emit_insn_after (move, p);
8125		      }
8126		    break;
8127		  }
8128	    }
8129    }
8130
8131  /* Now invalidate anything modified by X.  */
8132  note_mem_written (SET_DEST (x));
8133
8134  /* See comment on similar code in cse_insn for explanation of these tests.  */
8135  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8136      || GET_CODE (SET_DEST (x)) == MEM)
8137    invalidate (SET_DEST (x), VOIDmode);
8138  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8139	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8140    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8141}
8142
/* Find the end of INSN's basic block and record its range,
   the total number of SETs in all the insns of the block, the last insn of the
   block, and the branch path.

   The branch path indicates which branches should be followed.  If a non-zero
   path size is specified, the block should be rescanned and a different set
   of branches will be taken.  The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.

   DATA is a pointer to a struct cse_basic_block_data, defined earlier in
   this file, that is used to describe the block.  It is filled in with the
   information about the current block.  The incoming structure's branch
   path, if any, is used to construct the output branch path.  */

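/* For reference, a rough sketch of the structure filled in here; the
   declaration earlier in this file is authoritative:

	struct cse_basic_block_data
	{
	  int low_cuid, high_cuid;
	  int nsets;
	  rtx last;
	  int path_size;
	  struct branch_path
	    {
	      rtx branch;
	      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
	    } path[PATHLENGTH];
	};  */
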
void
cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
     rtx insn;
     struct cse_basic_block_data *data;
     int follow_jumps;
     int after_loop;
     int skip_blocks;
{
  rtx p = insn, q;
  int nsets = 0;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
  int path_size = data->path_size;
  int path_entry = 0;
  int i;

  /* Update the previous branch path, if any.  If the last branch was
     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is non-zero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != NOT_TAKEN)
	{
	  data->path[path_size - 1].status = NOT_TAKEN;
	  break;
	}
      else
	path_size--;
    }

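  /* A sketch of the resulting enumeration order (not from the original
     source): the loop above flips the deepest branch still marked TAKEN
     or AROUND to NOT_TAKEN and discards trailing NOT_TAKEN entries, so
     with two followed branches B1 and B2 successive rescans see

	(B1 TAKEN,     B2 TAKEN)
	(B1 TAKEN,     B2 NOT_TAKEN)
	(B1 NOT_TAKEN, ...)

     much like counting down in binary.  */
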
  /* Scan to end of this basic block.  */
  while (p && GET_CODE (p) != CODE_LABEL)
    {
      /* Don't cse out the end of a loop.  This makes a difference
	 only for the unusual loops that always execute at least once;
	 all other loops have labels there so we will stop in any case.
	 Cse'ing out the end of the loop is dangerous because it
	 might cause an invariant expression inside the loop
	 to be reused after the end of the loop.  This would make it
	 hard to move the expression out of the loop in loop.c,
	 especially if it is one of several equivalent expressions
	 and loop.c would like to eliminate it.

	 If we are running after loop.c has finished, we can ignore
	 the NOTE_INSN_LOOP_END.  */

      if (! after_loop && GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
	break;

      /* Don't cse over a call to setjmp; on some machines (eg vax)
	 the regs restored by the longjmp come from
	 a later time than the setjmp.  */
      if (GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
	break;

      /* A PARALLEL can have lots of SETs in it,
	 especially if it is really an ASM_OPERANDS.  */
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && GET_CODE (PATTERN (p)) == PARALLEL)
	nsets += XVECLEN (PATTERN (p), 0);
      else if (GET_CODE (p) != NOTE)
	nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
	high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
	low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
	 take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
	{
	  if (data->path[path_entry].status != NOT_TAKEN)
	    p = JUMP_LABEL (p);

	  /* Point to next entry in path, if any.  */
	  path_entry++;
	}

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
	 was specified, we haven't reached our maximum path length, there are
	 insns following the target of the jump, this is the only use of the
	 jump label, and the target label is preceded by a BARRIER.

	 Alternatively, we can follow the jump if it branches around a
	 block of code and there are no other branches into the block.
	 In this case invalidate_skipped_block will be called to invalidate any
	 registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
	       && GET_CODE (p) == JUMP_INSN
	       && GET_CODE (PATTERN (p)) == SET
	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
	       && JUMP_LABEL (p) != 0
	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
	{
	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
	    if ((GET_CODE (q) != NOTE
	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
	        && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
	      break;

	  /* If we ran into a BARRIER, this code is an extension of the
	     basic block when the branch is taken.  */
	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
	    {
	      /* Don't allow ourselves to keep walking around an
		 always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Similarly, don't put a branch in our path more than once.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      data->path[path_entry].branch = p;
	      data->path[path_entry++].status = TAKEN;

	      /* This branch now ends our path.  It was possible that we
		 didn't see this branch the last time around (when the
		 insn in front of the target was a JUMP_INSN that was
		 turned into a no-op).  */
	      path_size = path_entry;

	      p = JUMP_LABEL (p);
	      /* Mark block so we won't scan it again later.  */
	      PUT_MODE (NEXT_INSN (p), QImode);
	    }
	  /* Detect a branch around a block of code.  */
	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
	    {
	      register rtx tmp;

	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      /* This is no_labels_between_p (p, q) with an added check for
		 reaching the end of a function (in case Q precedes P).  */
	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
		if (GET_CODE (tmp) == CODE_LABEL)
		  break;

	      if (tmp == q)
		{
		  data->path[path_entry].branch = p;
		  data->path[path_entry++].status = AROUND;

		  path_size = path_entry;

		  p = JUMP_LABEL (p);
		  /* Mark block so we won't scan it again later.  */
		  PUT_MODE (NEXT_INSN (p), QImode);
		}
	    }
	}
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If none of the jumps in the path were taken, set our path length to
     zero so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}

/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */
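
/* Illustrative call (a sketch, not code from this file; the variable
   names are invented): the driver in toplev.c runs this pass roughly as

	rerun_jump = cse_main (get_insns (), max_reg_num (), 0, dump_file);

   passing AFTER_LOOP == 1 for the rerun under -frerun-cse-after-loop and
   redoing jump optimization when the result is non-zero; see toplev.c
   for the real call.  */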

int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  register rtx insn = f;
  register int i;

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();
  init_alias_analysis ();

  max_reg = nregs;

  max_insn_uid = get_max_uid ();

  all_minus_one = (int *) alloca (nregs * sizeof (int));
  consec_ints = (int *) alloca (nregs * sizeof (int));

  for (i = 0; i < nregs; i++)
    {
      all_minus_one[i] = -1;
      consec_ints[i] = i;
    }

  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
  reg_qty = (int *) alloca (nregs * sizeof (int));
  reg_in_table = (int *) alloca (nregs * sizeof (int));
  reg_tick = (int *) alloca (nregs * sizeof (int));

#ifdef LOAD_EXTEND_OP

  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
     and change the code and mode as appropriate.  */
  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif

  /* Discard all the free elements of the previous function
     since they are allocated in the temporary obstack.  */
  bzero ((char *) table, sizeof table);
  free_element_chain = 0;
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
	  || NOTE_LINE_NUMBER (insn) < 0)
	INSN_CUID (insn) = ++i;
      else
	/* Give a line number note the same cuid as preceding insn.  */
	INSN_CUID (insn) = i;
    }

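  /* Example of the mapping (the uids are invented): for uid 10 (insn),
     uid 11 (line-number NOTE), uid 12 (insn), the loop above assigns
     cuids 1, 1 and 2; the note shares the cuid of the preceding insn,
     so cuid distances are identical with and without -g.  */
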
  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
	 /* Used to check !fixed_regs[i] here, but that isn't safe;
	    fixed regs are still call-clobbered, and sched can get
	    confused if they can "live across calls".

	    The frame pointer is always preserved across calls.  The arg
	    pointer is preserved if it is fixed.  The stack pointer usually
	    is, unless RETURN_POPS_ARGS, in which case an explicit CLOBBER
	    will be present.  If we are generating PIC code, the PIC offset
	    table register is preserved across calls.  */

	 && i != STACK_POINTER_REGNUM
	 && i != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
	 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
	 )
	|| global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);

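  /* At this point regs_invalidated_by_call holds the hard registers whose
     cached equivalences must be dropped whenever a CALL_INSN is scanned;
     it is consulted by invalidate_for_call when such insns are
     processed.  */
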
  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
			      flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
	{
	  PUT_MODE (insn, VOIDmode);
	  insn = (val.last ? NEXT_INSN (val.last) : 0);
	  val.path_size = 0;
	  continue;
	}

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
	fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
		 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
	 past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
	max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
	{
	  int old_cse_jumps_altered = cse_jumps_altered;
	  rtx temp;

	  /* When cse changes a conditional jump to an unconditional
	     jump, we want to reprocess the block, since it will give
	     us a new branch path to investigate.  */
	  cse_jumps_altered = 0;
	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
	  if (cse_jumps_altered == 0
	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
	    insn = temp;

	  cse_jumps_altered |= old_cse_jumps_altered;
	}

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered || recorded_label_ref;
}

/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

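/* Sketch of the branch-path protocol (descriptive, not from the original
   source): NEXT_BRANCH walks the path[] array that cse_end_of_basic_block
   filled in.  When the scan below reaches path[i].branch, a TAKEN entry
   records the jump equivalence and continues at JUMP_LABEL, an AROUND
   entry invalidates the skipped insns and likewise follows the jump, and
   a NOT_TAKEN entry lets the scan fall through.  */
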
static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg)
					   * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;

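  /* Example of the offset trick above (the numbers are invented): with
     max_reg of 100 and max_qty of 600, each array holds 500 elements
     and, after the adjustment, qty_mode[100] names element 0 of the
     allocation.  Quantity numbers below max_reg never occur, so no
     space is wasted on them.  */
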
  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);
      int i;
      struct table_elt *p, *next;

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different from code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  for (i = 0; i < NBUCKETS; i++)
	    for (p = table[i]; p; p = next)
	      {
		next = p->next_same_hash;

		if (GET_CODE (p->exp) == REG)
		  invalidate (p->exp, p->mode);
		else
		  remove_from_table (p, i);
	      }

	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != NOT_TAKEN)
	    {
	      if (status == TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
#endif
	      prev_insn = insn;
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside a LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

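	  /* Example of the bracketing (a sketch; the insn numbers are
	     invented):

		(insn 10 ...)  with a REG_LIBCALL note pointing at insn 14
		(insn 12 ...)  interior of the block
		(insn 14 ...)  with a REG_RETVAL note pointing at insn 10

	     While scanning insns 10 and 12, libcall_insn is non-zero;
	     insn 14 clears it before cse_insn runs, so the destination
	     of the whole libcall sequence is recorded normally.  */
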
	  if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
	    libcall_insn = XEXP (p, 0);
	  else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    libcall_insn = NULL_RTX;

	  cse_insn (insn, libcall_insn);
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (simplejump_p (insn))
	{
	  if (to == 0)
	    return 0;

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  if (LABEL_NUSES (to) == 0)
	    insn = delete_insn (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    return 0;

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    return insn;

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

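/* Typical use (as in delete_trivially_dead_insns below): accumulate a
   usage count for every insn with

	count_reg_usage (insn, counts, NULL_RTX, 1);

   and, when an insn is later deleted, cancel its contribution by calling
   the same routine with an INCR of -1.  */
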
static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
	 usage of SET_DEST inside SRC counts.

	 ??? Strictly-speaking, we might be preserving this insn
	 because some other SET has side-effects, but that's hard
	 to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

/* Scan all the insns and delete any that are dead; i.e., they set a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop.c since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

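/* Example of an insn this pass deletes (a sketch; the numbers are
   invented):

	(insn 23 ... (set (reg 66) (plus (reg 60) (reg 61))))

   If counts[66] is zero after the counting pass -- no remaining insn
   uses (reg 66) -- and the source has no side effects, insn 23 is
   deleted and the counts of (reg 60) and (reg 61) are decremented,
   possibly exposing their setters as dead on this same backward scan.  */
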
void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      if (set
		  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
		{
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
	    ;

#ifdef HAVE_cc0
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;
#endif
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn))))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if (GET_CODE (SET_DEST (elt)) == REG
		    && SET_DEST (elt) == SET_SRC (elt))
		  ;

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt)))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }
}
