/* cse.c revision 56385 */
/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "splay-tree.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the mode recorded for that quantity (`qty_mode') must be in the
   hash table for both registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number, if the mode recorded for the
   quantity of one of the registers differs from the mode of those
   expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
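
   For example, if (const_int 4) is moved both into an SImode register
   and into a DImode register, it gets two entries in the table, one
   recorded with SImode and one with DImode.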

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes any existing entries to be ignored if anyone tries
   to match them.
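
   For example, suppose reg_tick[6] is 2 when an expression mentioning
   register 6 is entered in the table; reg_in_table[6] is then set to 2.
   A later store into register 6 bumps reg_tick[6] to 3; the stale
   entries are ignored by lookups from then on, and are actually removed
   only when a new reference to register 6 is about to be entered.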

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
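
/* What follows is an illustrative sketch, not part of the compiler: a
   toy model of the quantity-number scheme described above.  All names
   (TOY_MAX_REG, toy_reg_qty, toy_new_block, toy_load, toy_copy) are
   invented for this illustration, and the block is guarded by `#if 0'
   so that it is never compiled.  */
#if 0
#define TOY_MAX_REG 8

/* Quantity assigned to each register; real quantity numbers start at
   TOY_MAX_REG, as with `max_reg' above.  */
static int toy_reg_qty[TOY_MAX_REG];
static int toy_next_qty;

/* Start a new extended basic block: no register has a valid quantity,
   which is encoded as toy_reg_qty[N] == N.  */
static void
toy_new_block ()
{
  int i;

  toy_next_qty = TOY_MAX_REG;
  for (i = 0; i < TOY_MAX_REG; i++)
    toy_reg_qty[i] = i;
}

/* Register REG is loaded in some arbitrary way: describe its new value
   with a fresh quantity number.  */
static void
toy_load (reg)
     int reg;
{
  toy_reg_qty[reg] = toy_next_qty++;
}

/* Register DST is copied from register SRC: both now share SRC's
   quantity, so a later use of DST may be replaced by SRC.  */
static void
toy_copy (dst, src)
     int dst, src;
{
  if (toy_reg_qty[src] == src)
    toy_load (src);
  toy_reg_qty[dst] = toy_reg_qty[src];
}
#endif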

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been passed.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  */
    struct cse_reg_info* next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A mapping from registers to cse_reg_info data structures.  */
static splay_tree cse_reg_info_tree;

/* The last lookup we did into the cse_reg_info_tree.  This allows us
   to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
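
/* Unlike uids, cuids are assigned in insn-stream order when cse_main
   starts, so comparing two insns' cuids compares their positions in the
   current insn stream.  */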

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a pseudo
   register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS	\
  : canon_hash (X, M) % NBUCKETS)
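
/* Thus a pseudo register is hashed via its quantity number rather than
   its register number, so registers known to hold the same value land in
   the same bucket; everything else goes through the full, recursive
   canon_hash.  */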

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce the need
   for non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])	  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest, with a cost
   of 0.  Next come pseudos, with a cost of 1, and other hard registers,
   with a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)								\
  (GET_CODE (X) == REG							\
   ? (CHEAP_REG (X) ? 0							\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1				\
      : 2)								\
   : notreg_cost (X))
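
/* For example, COST of the frame pointer is 0, COST of an ordinary
   pseudo register is 1, COST of a non-fixed hard register is 2, and the
   cost of anything else is computed by notreg_cost, i.e. essentially by
   rtx_cost.  */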

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) 			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX)					\
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0)))	\
   ? -1 : ADDRESS_COST(RTX))
#endif

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as TAKEN,
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == arg_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
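
/* So, for example, (plus:SI (reg frame_pointer) (const_int 8)) satisfies
   FIXED_BASE_PLUS_P, while (plus:SI (reg 100) (const_int 8)) does not.  */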

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PROTO((rtx));
static void new_basic_block	PROTO((void));
static void make_new_qty	PROTO((int));
static void make_regs_eqv	PROTO((int, int));
static void delete_reg_equiv	PROTO((int));
static int mention_regs		PROTO((rtx));
static int insert_regs		PROTO((rtx, struct table_elt *, int));
static void free_element	PROTO((struct table_elt *));
static void remove_from_table	PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup	PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate		PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p	PROTO((rtx));
static void remove_invalid_refs	PROTO((int));
static void remove_invalid_subreg_refs	PROTO((int, int, enum machine_mode));
static void rehash_using_reg	PROTO((rtx));
static void invalidate_memory	PROTO((void));
static void invalidate_for_call	PROTO((void));
static rtx use_related_value	PROTO((rtx, struct table_elt *));
static unsigned canon_hash	PROTO((rtx, enum machine_mode));
static unsigned safe_hash	PROTO((rtx, enum machine_mode));
static int exp_equiv_p		PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p		PROTO((rtx, rtx));
static rtx canon_reg		PROTO((rtx, rtx));
static void find_best_addr	PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx fold_rtx		PROTO((rtx, rtx));
static rtx equiv_constant	PROTO((rtx));
static void record_jump_equiv	PROTO((rtx, int));
static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx, int));
static void cse_insn		PROTO((rtx, rtx));
static int note_mem_written	PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes	PROTO((rtx, rtx));
static void cse_around_loop	PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PROTO((rtx, int *, rtx, int));
extern void dump_class          PROTO((struct table_elt*));
static void check_fold_consts	PROTO((PTR));
static struct cse_reg_info* get_cse_reg_info PROTO((int));
static void free_cse_reg_info   PROTO((splay_tree_value));
static void flush_hash_table	PROTO((void));

extern int rtx_equal_function_value_matters;

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
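
/* Thus COSTS_N_INSNS (1) == 2, the assumed cost of one fast
   register-to-register instruction, and COSTS_N_INSNS (5) == 18.  */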

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
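
      /* On targets that define them, RTX_COSTS and CONST_COSTS expand to
	 sequences of `case' labels and return statements, so the code
	 below is reachable even though it follows a return.  */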
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info *cri;
  splay_tree_node n;

  /* See if we already have this entry.  */
  n = splay_tree_lookup (cse_reg_info_tree,
			(splay_tree_key) regno);
  if (n)
    cri = (struct cse_reg_info *) (n->value);
  else
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  cri = cse_reg_info_free_list;
	  cse_reg_info_free_list = cri->variant.next;
	}
      else
	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Initialize it.  */
      cri->variant.reg_tick = 0;
      cri->reg_in_table = -1;
      cri->reg_qty = regno;

      splay_tree_insert (cse_reg_info_tree,
			 (splay_tree_key) regno,
			 (splay_tree_value) cri);
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = cri;

  return cri;
}

static void
free_cse_reg_info (v)
     splay_tree_value v;
{
  struct cse_reg_info *cri = (struct cse_reg_info *) v;

  cri->variant.next = cse_reg_info_free_list;
  cse_reg_info_free_list = cri;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  if (cse_reg_info_tree)
    {
      splay_tree_delete (cse_reg_info_tree);
      cached_cse_reg_info = 0;
    }

  cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
				      free_cse_reg_info);

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
1422
1423/* Insert X in the hash table, assuming HASH is its hash code
1424   and CLASSP is an element of the class it should go in
1425   (or 0 if a new class should be made).
1426   It is inserted at the proper position to keep the class in
1427   the order cheapest first.
1428
1429   MODE is the machine-mode of X, or if X is an integer constant
1430   with VOIDmode then MODE is the mode with which X will be used.
1431
1432   For elements of equal cheapness, the most recent one
1433   goes in front, except that the first element in the list
1434   remains first unless a cheaper element is added.  The order of
1435   pseudo-registers does not matter, as canon_reg will be called to
1436   find the cheapest when a register is retrieved from the table.
1437
1438   The in_memory field in the hash table element is set to 0.
1439   The caller must set it nonzero if appropriate.
1440
1441   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1442   and if insert_regs returns a nonzero value
1443   you must then recompute its hash code before calling here.
1444
1445   If necessary, update table showing constant values of quantities.  */
1446
1447#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)
1448
1449static struct table_elt *
1450insert (x, classp, hash, mode)
1451     register rtx x;
1452     register struct table_elt *classp;
1453     unsigned hash;
1454     enum machine_mode mode;
1455{
1456  register struct table_elt *elt;
1457
1458  /* If X is a register and we haven't made a quantity for it,
1459     something is wrong.  */
1460  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1461    abort ();
1462
1463  /* If X is a hard register, show it is being put in the table.  */
1464  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1465    {
1466      int regno = REGNO (x);
1467      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1468      int i;
1469
1470      for (i = regno; i < endregno; i++)
1471	    SET_HARD_REG_BIT (hard_regs_in_table, i);
1472    }
1473
1474  /* If X is a label, show we recorded it.  */
1475  if (GET_CODE (x) == LABEL_REF
1476      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1477	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1478    recorded_label_ref = 1;
1479
1480  /* Put an element for X into the right hash bucket.  */
1481
1482  elt = get_element ();
1483  elt->exp = x;
1484  elt->cost = COST (x);
1485  elt->next_same_value = 0;
1486  elt->prev_same_value = 0;
1487  elt->next_same_hash = table[hash];
1488  elt->prev_same_hash = 0;
1489  elt->related_value = 0;
1490  elt->in_memory = 0;
1491  elt->mode = mode;
1492  elt->is_const = (CONSTANT_P (x)
1493		   /* GNU C++ takes advantage of this for `this'
1494		      (and other const values).  */
1495		   || (RTX_UNCHANGING_P (x)
1496		       && GET_CODE (x) == REG
1497		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1498		   || FIXED_BASE_PLUS_P (x));
1499
1500  if (table[hash])
1501    table[hash]->prev_same_hash = elt;
1502  table[hash] = elt;
1503
1504  /* Put it into the proper value-class.  */
1505  if (classp)
1506    {
1507      classp = classp->first_same_value;
1508      if (CHEAPER (elt, classp))
1509	/* Insert at the head of the class */
1510	{
1511	  register struct table_elt *p;
1512	  elt->next_same_value = classp;
1513	  classp->prev_same_value = elt;
1514	  elt->first_same_value = elt;
1515
1516	  for (p = classp; p; p = p->next_same_value)
1517	    p->first_same_value = elt;
1518	}
1519      else
1520	{
1521	  /* Insert not at head of the class.  */
1522	  /* Put it after the last element cheaper than X.  */
1523	  register struct table_elt *p, *next;
1524	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1525	       p = next);
1526	  /* Put it after P and before NEXT.  */
1527	  elt->next_same_value = next;
1528	  if (next)
1529	    next->prev_same_value = elt;
1530	  elt->prev_same_value = p;
1531	  p->next_same_value = elt;
1532	  elt->first_same_value = classp;
1533	}
1534    }
1535  else
1536    elt->first_same_value = elt;
1537
1538  /* If this is a constant being set equivalent to a register or a register
1539     being set equivalent to a constant, note the constant equivalence.
1540
1541     If this is a constant, it cannot be equivalent to a different constant,
1542     and a constant is the only thing that can be cheaper than a register.  So
1543     we know the register is the head of the class (before the constant was
1544     inserted).
1545
1546     If this is a register that is not already known equivalent to a
1547     constant, we must check the entire class.
1548
1549     If this is a register that is already known equivalent to an insn,
1550     update `qty_const_insn' to show that `this_insn' is the latest
1551     insn making that quantity equivalent to the constant.  */
1552
1553  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1554      && GET_CODE (x) != REG)
1555    {
1556      qty_const[REG_QTY (REGNO (classp->exp))]
1557	= gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
1558      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
1559    }
1560
1561  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
1562	   && ! elt->is_const)
1563    {
1564      register struct table_elt *p;
1565
1566      for (p = classp; p != 0; p = p->next_same_value)
1567	{
1568	  if (p->is_const && GET_CODE (p->exp) != REG)
1569	    {
1570	      qty_const[REG_QTY (REGNO (x))]
1571		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1572	      qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
1573	      break;
1574	    }
1575	}
1576    }
1577
1578  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
1579	   && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
1580    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
1581
1582  /* If this is a constant with symbolic value,
1583     and it has a term with an explicit integer value,
1584     link it up with related expressions.  */
1585  if (GET_CODE (x) == CONST)
1586    {
1587      rtx subexp = get_related_value (x);
1588      unsigned subhash;
1589      struct table_elt *subelt, *subelt_prev;
1590
1591      if (subexp != 0)
1592	{
1593	  /* Get the integer-free subexpression in the hash table.  */
1594	  subhash = safe_hash (subexp, mode) % NBUCKETS;
1595	  subelt = lookup (subexp, subhash, mode);
1596	  if (subelt == 0)
1597	    subelt = insert (subexp, NULL_PTR, subhash, mode);
1598	  /* Initialize SUBELT's circular chain if it has none.  */
1599	  if (subelt->related_value == 0)
1600	    subelt->related_value = subelt;
1601	  /* Find the element in the circular chain that precedes SUBELT.  */
1602	  subelt_prev = subelt;
1603	  while (subelt_prev->related_value != subelt)
1604	    subelt_prev = subelt_prev->related_value;
1605	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1606	     This way the element that follows SUBELT is the oldest one.  */
1607	  elt->related_value = subelt_prev->related_value;
1608	  subelt_prev->related_value = elt;
1609	}
1610    }
1611
1612  return elt;
1613}
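
/* Example of the related-value machinery above (hypothetical RTL, not code
   in this pass): if the table already holds
   (const (plus (symbol_ref "s") (const_int 4))), its integer-free term
   (symbol_ref "s") anchors a circular related_value chain.  Inserting
   (const (plus (symbol_ref "s") (const_int 8))) splices the new element in
   just before that anchor, so the element following the anchor is always
   the oldest related constant and use_related_value can walk the chain.  */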
1614
1615/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1616   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1617   the two classes equivalent.
1618
1619   CLASS1 will be the surviving class; CLASS2 should not be used after this
1620   call.
1621
1622   Any invalid entries in CLASS2 will not be copied.  */
1623
1624static void
1625merge_equiv_classes (class1, class2)
1626     struct table_elt *class1, *class2;
1627{
1628  struct table_elt *elt, *next, *new;
1629
1630  /* Ensure we start with the head of the classes.  */
1631  class1 = class1->first_same_value;
1632  class2 = class2->first_same_value;
1633
1634  /* If they were already equal, forget it.  */
1635  if (class1 == class2)
1636    return;
1637
1638  for (elt = class2; elt; elt = next)
1639    {
1640      unsigned hash;
1641      rtx exp = elt->exp;
1642      enum machine_mode mode = elt->mode;
1643
1644      next = elt->next_same_value;
1645
1646      /* Remove old entry, make a new one in CLASS1's class.
1647	 Don't do this for invalid entries as we cannot find their
1648	 hash code (it also isn't necessary).  */
1649      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1650	{
1651	  hash_arg_in_memory = 0;
1652	  hash_arg_in_struct = 0;
1653	  hash = HASH (exp, mode);
1654
1655	  if (GET_CODE (exp) == REG)
1656	    delete_reg_equiv (REGNO (exp));
1657
1658	  remove_from_table (elt, hash);
1659
1660	  if (insert_regs (exp, class1, 0))
1661	    {
1662	      rehash_using_reg (exp);
1663	      hash = HASH (exp, mode);
1664	    }
1665	  new = insert (exp, class1, hash, mode);
1666	  new->in_memory = hash_arg_in_memory;
1667	  new->in_struct = hash_arg_in_struct;
1668	}
1669    }
1670}
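
/* Example (hypothetical classes): if CLASS1 is { (reg 100), (mem (reg 101)) }
   and CLASS2 is { (reg 102) }, and an insn proves the two values equal,
   merge_equiv_classes rehashes (reg 102) into CLASS1, leaving one class of
   three entries; CLASS2 must not be used afterwards.  */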
1671
1672
1673/* Flush the entire hash table.  */
1674
1675static void
1676flush_hash_table ()
1677{
1678  int i;
1679  struct table_elt *p;
1680
1681  for (i = 0; i < NBUCKETS; i++)
1682    for (p = table[i]; p; p = table[i])
1683      {
1684	/* Note that invalidate can remove elements
1685	   after P in the current hash chain.  */
1686	if (GET_CODE (p->exp) == REG)
1687	  invalidate (p->exp, p->mode);
1688	else
1689	  remove_from_table (p, i);
1690      }
1691}
1692
1693
1694/* Remove from the hash table, or mark as invalid,
1695   all expressions whose values could be altered by storing in X.
1696   X is a register, a subreg, or a memory reference with nonvarying address
1697   (because, when a memory reference with a varying address is stored in,
1698   all memory references are removed by invalidate_memory
1699   so specific invalidation is superfluous).
1700   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
1701   instead of just the amount indicated by the mode of X.  This is only used
1702   for bitfield stores into memory.
1703
1704   A nonvarying address may be just a register or just
1705   a symbol reference, or it may be either of those plus
1706   a numeric offset.  */
1707
1708static void
1709invalidate (x, full_mode)
1710     rtx x;
1711     enum machine_mode full_mode;
1712{
1713  register int i;
1714  register struct table_elt *p;
1715
1716  /* If X is a register, dependencies on its contents
1717     are recorded through the qty number mechanism.
1718     Just change the qty number of the register,
1719     mark it as invalid for expressions that refer to it,
1720     and remove it itself.  */
1721
1722  if (GET_CODE (x) == REG)
1723    {
1724      register int regno = REGNO (x);
1725      register unsigned hash = HASH (x, GET_MODE (x));
1726
1727      /* Remove REGNO from any quantity list it might be on and indicate
1728	 that its value might have changed.  If it is a pseudo, remove its
1729	 entry from the hash table.
1730
1731	 For a hard register, we do the first two actions above for any
1732	 additional hard registers corresponding to X.  Then, if any of these
1733	 registers are in the table, we must remove any REG entries that
1734	 overlap these registers.  */
1735
1736      delete_reg_equiv (regno);
1737      REG_TICK (regno)++;
1738
1739      if (regno >= FIRST_PSEUDO_REGISTER)
1740	{
1741	  /* Because a register can be referenced in more than one mode,
1742	     we might have to remove more than one table entry.  */
1743
1744	  struct table_elt *elt;
1745
1746	  while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1747	    remove_from_table (elt, hash);
1748	}
1749      else
1750	{
1751	  HOST_WIDE_INT in_table
1752	    = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1753	  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1754	  int tregno, tendregno;
1755	  register struct table_elt *p, *next;
1756
1757	  CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1758
1759	  for (i = regno + 1; i < endregno; i++)
1760	    {
1761	      in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1762	      CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1763	      delete_reg_equiv (i);
1764	      REG_TICK (i)++;
1765	    }
1766
1767	  if (in_table)
1768	    for (hash = 0; hash < NBUCKETS; hash++)
1769	      for (p = table[hash]; p; p = next)
1770		{
1771		  next = p->next_same_hash;
1772
1773		  if (GET_CODE (p->exp) != REG
1774		      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1775		    continue;
1776
1777		  tregno = REGNO (p->exp);
1778		  tendregno
1779		    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1780		  if (tendregno > regno && tregno < endregno)
1781		    remove_from_table (p, hash);
1782		}
1783	}
1784
1785      return;
1786    }
1787
1788  if (GET_CODE (x) == SUBREG)
1789    {
1790      if (GET_CODE (SUBREG_REG (x)) != REG)
1791	abort ();
1792      invalidate (SUBREG_REG (x), VOIDmode);
1793      return;
1794    }
1795
1796  /* If X is a parallel, invalidate all of its elements.  */
1797
1798  if (GET_CODE (x) == PARALLEL)
1799    {
1800      for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1801	invalidate (XVECEXP (x, 0, i), VOIDmode);
1802      return;
1803    }
1804
1805  /* If X is an expr_list, this is part of a disjoint return value;
1806     extract the location in question ignoring the offset.  */
1807
1808  if (GET_CODE (x) == EXPR_LIST)
1809    {
1810      invalidate (XEXP (x, 0), VOIDmode);
1811      return;
1812    }
1813
1814  /* X is not a register; it must be a memory reference with
1815     a nonvarying address.  Remove all hash table elements
1816     that refer to overlapping pieces of memory.  */
1817
1818  if (GET_CODE (x) != MEM)
1819    abort ();
1820
1821  if (full_mode == VOIDmode)
1822    full_mode = GET_MODE (x);
1823
1824  for (i = 0; i < NBUCKETS; i++)
1825    {
1826      register struct table_elt *next;
1827      for (p = table[i]; p; p = next)
1828	{
1829	  next = p->next_same_hash;
1830	  /* Invalidate ASM_OPERANDS which reference memory (this is easier
1831	     than checking all the aliases).  */
1832	  if (p->in_memory
1833	      && (GET_CODE (p->exp) != MEM
1834		  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
1835	    remove_from_table (p, i);
1836	}
1837    }
1838}
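
/* For illustration (hypothetical calls; STACK_SLOT is an invented name):

     invalidate (gen_rtx_REG (DImode, 3), VOIDmode);

   on a target where DImode occupies two word registers bumps REG_TICK for
   hard regs 3 and 4 and removes any table entry whose REG overlaps them,
   while

     invalidate (stack_slot, VOIDmode);

   with STACK_SLOT some (mem (plus (reg fp) (const_int -8))) removes only
   in_memory entries that may truly depend on that slot.  */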
1839
1840/* Remove all expressions that refer to register REGNO,
1841   since they are already invalid, and we are about to
1842   mark that register valid again and don't want the old
1843   expressions to reappear as valid.  */
1844
1845static void
1846remove_invalid_refs (regno)
1847     int regno;
1848{
1849  register int i;
1850  register struct table_elt *p, *next;
1851
1852  for (i = 0; i < NBUCKETS; i++)
1853    for (p = table[i]; p; p = next)
1854      {
1855	next = p->next_same_hash;
1856	if (GET_CODE (p->exp) != REG
1857	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1858	  remove_from_table (p, i);
1859      }
1860}
1861
1862/* Likewise for a subreg of register REGNO, with subreg word WORD and mode MODE.  */
1863static void
1864remove_invalid_subreg_refs (regno, word, mode)
1865     int regno;
1866     int word;
1867     enum machine_mode mode;
1868{
1869  register int i;
1870  register struct table_elt *p, *next;
1871  int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1872
1873  for (i = 0; i < NBUCKETS; i++)
1874    for (p = table[i]; p; p = next)
1875      {
1876	rtx exp;
1877	next = p->next_same_hash;
1878
1879	exp = p->exp;
1880	if (GET_CODE (p->exp) != REG
1881	    && (GET_CODE (exp) != SUBREG
1882		|| GET_CODE (SUBREG_REG (exp)) != REG
1883		|| REGNO (SUBREG_REG (exp)) != regno
1884		|| (((SUBREG_WORD (exp)
1885		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1886		     >= word)
1887		 && SUBREG_WORD (exp) <= end))
1888	    && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1889	  remove_from_table (p, i);
1890      }
1891}
1892
1893/* Recompute the hash codes of any valid entries in the hash table that
1894   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1895
1896   This is called when we make a jump equivalence.  */
1897
1898static void
1899rehash_using_reg (x)
1900     rtx x;
1901{
1902  unsigned int i;
1903  struct table_elt *p, *next;
1904  unsigned hash;
1905
1906  if (GET_CODE (x) == SUBREG)
1907    x = SUBREG_REG (x);
1908
1909  /* If X is not a register or if the register is known not to be in any
1910     valid entries in the table, we have no work to do.  */
1911
1912  if (GET_CODE (x) != REG
1913      || REG_IN_TABLE (REGNO (x)) < 0
1914      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1915    return;
1916
1917  /* Scan all hash chains looking for valid entries that mention X.
1918     If we find one and it is in the wrong hash chain, move it.  We can skip
1919     objects that are registers, since they are handled specially.  */
1920
1921  for (i = 0; i < NBUCKETS; i++)
1922    for (p = table[i]; p; p = next)
1923      {
1924	next = p->next_same_hash;
1925	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1926	    && exp_equiv_p (p->exp, p->exp, 1, 0)
1927	    && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
1928	  {
1929	    if (p->next_same_hash)
1930	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
1931
1932	    if (p->prev_same_hash)
1933	      p->prev_same_hash->next_same_hash = p->next_same_hash;
1934	    else
1935	      table[i] = p->next_same_hash;
1936
1937	    p->next_same_hash = table[hash];
1938	    p->prev_same_hash = 0;
1939	    if (table[hash])
1940	      table[hash]->prev_same_hash = p;
1941	    table[hash] = p;
1942	  }
1943      }
1944}
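
/* For illustration: once a jump equivalence gives (reg 100) a new quantity,
   an entry such as (plus (reg 100) (const_int 4)) no longer lives in the
   bucket its hash now names, since canon_hash folds REG_QTY (100) into the
   sum; the loop above unlinks such entries and relinks them at
   safe_hash (exp, mode) % NBUCKETS.  */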
1945
1946/* Remove from the hash table any expressions that are call-clobbered
1947   registers.  Also update their TICK values.  */
1948
1949static void
1950invalidate_for_call ()
1951{
1952  int regno, endregno;
1953  int i;
1954  unsigned hash;
1955  struct table_elt *p, *next;
1956  int in_table = 0;
1957
1958  /* Go through all the hard registers.  For each that is clobbered in
1959     a CALL_INSN, remove the register from quantity chains and update
1960     reg_tick if defined.  Also see if any of these registers is currently
1961     in the table.  */
1962
1963  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1964    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1965      {
1966	delete_reg_equiv (regno);
1967	if (REG_TICK (regno) >= 0)
1968	  REG_TICK (regno)++;
1969
1970	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1971      }
1972
1973  /* In the case where we have no call-clobbered hard registers in the
1974     table, we are done.  Otherwise, scan the table and remove any
1975     entry that overlaps a call-clobbered register.  */
1976
1977  if (in_table)
1978    for (hash = 0; hash < NBUCKETS; hash++)
1979      for (p = table[hash]; p; p = next)
1980	{
1981	  next = p->next_same_hash;
1982
1983	  if (p->in_memory)
1984	    {
1985	      remove_from_table (p, hash);
1986	      continue;
1987	    }
1988
1989	  if (GET_CODE (p->exp) != REG
1990	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1991	    continue;
1992
1993	  regno = REGNO (p->exp);
1994	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1995
1996	  for (i = regno; i < endregno; i++)
1997	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1998	      {
1999		remove_from_table (p, hash);
2000		break;
2001	      }
2002	}
2003}
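
/* For illustration (hypothetical target): if hard regs 0..7 are in
   regs_invalidated_by_call, a CALL_INSN removes entries for (reg:SI 5) and
   for every in_memory expression, while entries for a call-saved
   (reg:SI 16) or for pseudo registers keep their quantities.  */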
2004
2005/* Given an expression X of type CONST,
2006   and ELT which is its table entry (or 0 if it
2007   is not in the hash table),
2008   return an alternate expression for X as a register plus integer.
2009   If none can be found, return 0.  */
2010
2011static rtx
2012use_related_value (x, elt)
2013     rtx x;
2014     struct table_elt *elt;
2015{
2016  register struct table_elt *relt = 0;
2017  register struct table_elt *p, *q;
2018  HOST_WIDE_INT offset;
2019
2020  /* First, is there anything related known?
2021     If we have a table element, we can tell from that.
2022     Otherwise, must look it up.  */
2023     Otherwise, we must look it up.  */
2024  if (elt != 0 && elt->related_value != 0)
2025    relt = elt;
2026  else if (elt == 0 && GET_CODE (x) == CONST)
2027    {
2028      rtx subexp = get_related_value (x);
2029      if (subexp != 0)
2030	relt = lookup (subexp,
2031		       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
2032		       GET_MODE (subexp));
2033    }
2034
2035  if (relt == 0)
2036    return 0;
2037
2038  /* Search all related table entries for one that has an
2039     equivalent register.  */
2040
2041  p = relt;
2042  while (1)
2043    {
2044      /* This loop is strange in that it is executed in two different cases.
2045	 The first is when X is already in the table.  Then it is searching
2046	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2047	 X is not in the table.  Then RELT points to a class for the related
2048	 value.
2049
2050	 Ensure that, whatever case we are in, we ignore classes that have
2051	 the same value as X.  */
2052
2053      if (rtx_equal_p (x, p->exp))
2054	q = 0;
2055      else
2056	for (q = p->first_same_value; q; q = q->next_same_value)
2057	  if (GET_CODE (q->exp) == REG)
2058	    break;
2059
2060      if (q)
2061	break;
2062
2063      p = p->related_value;
2064
2065      /* We went all the way around, so there is nothing to be found.
2066	 Alternatively, perhaps RELT was in the table for some other reason
2067	 and it has no related values recorded.  */
2068      if (p == relt || p == 0)
2069	break;
2070    }
2071
2072  if (q == 0)
2073    return 0;
2074
2075  offset = (get_integer_term (x) - get_integer_term (p->exp));
2076  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2077  return plus_constant (q->exp, offset);
2078}
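
/* Worked instance (hypothetical RTL): let X be
   (const (plus (symbol_ref "s") (const_int 8))) and suppose the table knows
   (const (plus (symbol_ref "s") (const_int 4))) is equivalent to (reg 100).
   The related_value chain reaches that class, Q->exp is (reg 100), OFFSET
   is 8 - 4 = 4, and the result is (plus (reg 100) (const_int 4)).  */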
2079
2080/* Hash an rtx.  We are careful to make sure the value is never negative.
2081   Equivalent registers hash identically.
2082   MODE is used in hashing for CONST_INTs only;
2083   otherwise the mode of X is used.
2084
2085   Store 1 in do_not_record if any subexpression is volatile.
2086
2087   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2088   which does not have the RTX_UNCHANGING_P bit set.
2089   In this case, also store 1 in hash_arg_in_struct
2090   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.
2091
2092   Note that cse_insn knows that the hash code of a MEM expression
2093   is just (int) MEM plus the hash code of the address.  */
2094
2095static unsigned
2096canon_hash (x, mode)
2097     rtx x;
2098     enum machine_mode mode;
2099{
2100  register int i, j;
2101  register unsigned hash = 0;
2102  register enum rtx_code code;
2103  register char *fmt;
2104
2105  /* repeat is used to turn tail-recursion into iteration.  */
2106 repeat:
2107  if (x == 0)
2108    return hash;
2109
2110  code = GET_CODE (x);
2111  switch (code)
2112    {
2113    case REG:
2114      {
2115	register int regno = REGNO (x);
2116
2117	/* On some machines, we can't record any non-fixed hard register,
2118	   because extending its life will cause reload problems.  We
2119	   consider ap, fp, and sp to be fixed for this purpose.
2120
2121	   We also consider CCmode registers to be fixed for this purpose;
2122	   failure to do so leads to failure to simplify 0<100 type of
2123	   conditionals.
2124
2125	   On all machines, we can't record any global registers.  */
2126
2127	if (regno < FIRST_PSEUDO_REGISTER
2128	    && (global_regs[regno]
2129		|| (SMALL_REGISTER_CLASSES
2130		    && ! fixed_regs[regno]
2131		    && regno != FRAME_POINTER_REGNUM
2132		    && regno != HARD_FRAME_POINTER_REGNUM
2133		    && regno != ARG_POINTER_REGNUM
2134		    && regno != STACK_POINTER_REGNUM
2135		    && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2136	  {
2137	    do_not_record = 1;
2138	    return 0;
2139	  }
2140	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2141	return hash;
2142      }
2143
2144    /* We handle SUBREG of a REG specially because the underlying
2145       reg changes its hash value with every value change; we don't
2146       want to have to forget unrelated subregs when one subreg changes.  */
2147    case SUBREG:
2148      {
2149	if (GET_CODE (SUBREG_REG (x)) == REG)
2150	  {
2151	    hash += (((unsigned) SUBREG << 7)
2152		     + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2153	    return hash;
2154	  }
2155	break;
2156      }
2157
2158    case CONST_INT:
2159      {
2160	unsigned HOST_WIDE_INT tem = INTVAL (x);
2161	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2162	return hash;
2163      }
2164
2165    case CONST_DOUBLE:
2166      /* This is like the general case, except that it only counts
2167	 the integers representing the constant.  */
2168      hash += (unsigned) code + (unsigned) GET_MODE (x);
2169      if (GET_MODE (x) != VOIDmode)
2170	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2171	  {
2172	    unsigned tem = XINT (x, i);
2173	    hash += tem;
2174	  }
2175      else
2176	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2177		 + (unsigned) CONST_DOUBLE_HIGH (x));
2178      return hash;
2179
2180      /* Assume there is only one rtx object for any given label.  */
2181    case LABEL_REF:
2182      hash
2183	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2184      return hash;
2185
2186    case SYMBOL_REF:
2187      hash
2188	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2189      return hash;
2190
2191    case MEM:
2192      if (MEM_VOLATILE_P (x))
2193	{
2194	  do_not_record = 1;
2195	  return 0;
2196	}
2197      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2198	{
2199	  hash_arg_in_memory = 1;
2200	  if (MEM_IN_STRUCT_P (x)) hash_arg_in_struct = 1;
2201	}
2202      /* Now that we have already found this special case,
2203	 might as well speed it up as much as possible.  */
2204      hash += (unsigned) MEM;
2205      x = XEXP (x, 0);
2206      goto repeat;
2207
2208    case PRE_DEC:
2209    case PRE_INC:
2210    case POST_DEC:
2211    case POST_INC:
2212    case PC:
2213    case CC0:
2214    case CALL:
2215    case UNSPEC_VOLATILE:
2216      do_not_record = 1;
2217      return 0;
2218
2219    case ASM_OPERANDS:
2220      if (MEM_VOLATILE_P (x))
2221	{
2222	  do_not_record = 1;
2223	  return 0;
2224	}
2225      break;
2226
2227    default:
2228      break;
2229    }
2230
2231  i = GET_RTX_LENGTH (code) - 1;
2232  hash += (unsigned) code + (unsigned) GET_MODE (x);
2233  fmt = GET_RTX_FORMAT (code);
2234  for (; i >= 0; i--)
2235    {
2236      if (fmt[i] == 'e')
2237	{
2238	  rtx tem = XEXP (x, i);
2239
2240	  /* If we are about to do the last recursive call
2241	     needed at this level, change it into iteration.
2242	     This function is called enough to be worth it.  */
2243	  if (i == 0)
2244	    {
2245	      x = tem;
2246	      goto repeat;
2247	    }
2248	  hash += canon_hash (tem, 0);
2249	}
2250      else if (fmt[i] == 'E')
2251	for (j = 0; j < XVECLEN (x, i); j++)
2252	  hash += canon_hash (XVECEXP (x, i, j), 0);
2253      else if (fmt[i] == 's')
2254	{
2255	  register unsigned char *p = (unsigned char *) XSTR (x, i);
2256	  if (p)
2257	    while (*p)
2258	      hash += *p++;
2259	}
2260      else if (fmt[i] == 'i')
2261	{
2262	  register unsigned tem = XINT (x, i);
2263	  hash += tem;
2264	}
2265      else if (fmt[i] == '0')
2266	/* unused */;
2267      else
2268	abort ();
2269    }
2270  return hash;
2271}
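
/* For illustration: because the REG case above hashes REG_QTY (regno)
   rather than the register number itself, pseudos known to hold the same
   value hash identically; after (set (reg 105) (reg 100)), canon_hash
   gives the same value for (plus (reg 100) (const_int 4)) and for
   (plus (reg 105) (const_int 4)), so the two meet in one hash chain.  */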
2272
2273/* Like canon_hash but with no side effects.  */
2274
2275static unsigned
2276safe_hash (x, mode)
2277     rtx x;
2278     enum machine_mode mode;
2279{
2280  int save_do_not_record = do_not_record;
2281  int save_hash_arg_in_memory = hash_arg_in_memory;
2282  int save_hash_arg_in_struct = hash_arg_in_struct;
2283  unsigned hash = canon_hash (x, mode);
2284  hash_arg_in_memory = save_hash_arg_in_memory;
2285  hash_arg_in_struct = save_hash_arg_in_struct;
2286  do_not_record = save_do_not_record;
2287  return hash;
2288}
2289
2290/* Return 1 iff X and Y would canonicalize into the same thing,
2291   without actually constructing the canonicalization of either one.
2292   If VALIDATE is nonzero,
2293   we assume X is an expression being processed from the rtl
2294   and Y was found in the hash table.  We check register refs
2295   in Y for being marked as valid.
2296
2297   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2298   that is known to be in the register.  Ordinarily, we don't allow them
2299   to match, because letting them match would cause unpredictable results
2300   in all the places that search a hash table chain for an equivalent
2301   for a given value.  A possible equivalent that has different structure
2302   has its hash code computed from different data.  Whether the hash code
2303   is the same as that of the given value is pure luck.  */
2304
2305static int
2306exp_equiv_p (x, y, validate, equal_values)
2307     rtx x, y;
2308     int validate;
2309     int equal_values;
2310{
2311  register int i, j;
2312  register enum rtx_code code;
2313  register char *fmt;
2314
2315  /* Note: it is incorrect to assume an expression is equivalent to itself
2316     if VALIDATE is nonzero.  */
2317  if (x == y && !validate)
2318    return 1;
2319  if (x == 0 || y == 0)
2320    return x == y;
2321
2322  code = GET_CODE (x);
2323  if (code != GET_CODE (y))
2324    {
2325      if (!equal_values)
2326	return 0;
2327
2328      /* If X is a constant and Y is a register or vice versa, they may be
2329	 equivalent.  We only have to validate if Y is a register.  */
2330      if (CONSTANT_P (x) && GET_CODE (y) == REG
2331	  && REGNO_QTY_VALID_P (REGNO (y))
2332	  && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
2333	  && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
2334	  && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2335	return 1;
2336
2337      if (CONSTANT_P (y) && code == REG
2338	  && REGNO_QTY_VALID_P (REGNO (x))
2339	  && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2340	  && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
2341	return 1;
2342
2343      return 0;
2344    }
2345
2346  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2347  if (GET_MODE (x) != GET_MODE (y))
2348    return 0;
2349
2350  switch (code)
2351    {
2352    case PC:
2353    case CC0:
2354      return x == y;
2355
2356    case CONST_INT:
2357      return INTVAL (x) == INTVAL (y);
2358
2359    case LABEL_REF:
2360      return XEXP (x, 0) == XEXP (y, 0);
2361
2362    case SYMBOL_REF:
2363      return XSTR (x, 0) == XSTR (y, 0);
2364
2365    case REG:
2366      {
2367	int regno = REGNO (y);
2368	int endregno
2369	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2370		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2371	int i;
2372
2373	/* If the quantities are not the same, the expressions are not
2374	   equivalent.  If they are and we are not to validate, they
2375	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2376
2377	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2378	  return 0;
2379
2380	if (! validate)
2381	  return 1;
2382
2383	for (i = regno; i < endregno; i++)
2384	  if (REG_IN_TABLE (i) != REG_TICK (i))
2385	    return 0;
2386
2387	return 1;
2388      }
2389
2390    /*  For commutative operations, check both orders.  */
2391    case PLUS:
2392    case MULT:
2393    case AND:
2394    case IOR:
2395    case XOR:
2396    case NE:
2397    case EQ:
2398      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2399	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2400			       validate, equal_values))
2401	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2402			       validate, equal_values)
2403		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2404				  validate, equal_values)));
2405
2406    default:
2407      break;
2408    }
2409
2410  /* Compare the elements.  If any pair of corresponding elements
2411     fails to match, return 0 for the whole thing.  */
2412
2413  fmt = GET_RTX_FORMAT (code);
2414  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2415    {
2416      switch (fmt[i])
2417	{
2418	case 'e':
2419	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2420	    return 0;
2421	  break;
2422
2423	case 'E':
2424	  if (XVECLEN (x, i) != XVECLEN (y, i))
2425	    return 0;
2426	  for (j = 0; j < XVECLEN (x, i); j++)
2427	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2428			       validate, equal_values))
2429	      return 0;
2430	  break;
2431
2432	case 's':
2433	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2434	    return 0;
2435	  break;
2436
2437	case 'i':
2438	  if (XINT (x, i) != XINT (y, i))
2439	    return 0;
2440	  break;
2441
2442	case 'w':
2443	  if (XWINT (x, i) != XWINT (y, i))
2444	    return 0;
2445	  break;
2446
2447	case '0':
2448	  break;
2449
2450	default:
2451	  abort ();
2452	}
2453      }
2454
2455  return 1;
2456}
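
/* For illustration (hypothetical operands): exp_equiv_p returns 1 for
   (plus:SI (reg 100) (reg 101)) against (plus:SI (reg 101) (reg 100)) via
   the commutative case, and, with EQUAL_VALUES nonzero, for (const_int 4)
   against a (reg:SI 102) whose quantity is known to hold 4; it returns 0
   for (mult:SI x y) against (mult:HI x y), since the modes differ.  */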
2457
2458/* Return 1 iff any subexpression of X matches Y.
2459   Here we do not require that X or Y be valid (for registers referred to)
2460   for being in the hash table.  */
2461
2462static int
2463refers_to_p (x, y)
2464     rtx x, y;
2465{
2466  register int i;
2467  register enum rtx_code code;
2468  register char *fmt;
2469
2470 repeat:
2471  if (x == y)
2472    return 1;
2473  if (x == 0 || y == 0)
2474    return 0;
2475
2476  code = GET_CODE (x);
2477  /* If X as a whole has the same code as Y, they may match.
2478     If so, return 1.  */
2479  if (code == GET_CODE (y))
2480    {
2481      if (exp_equiv_p (x, y, 0, 1))
2482	return 1;
2483    }
2484
2485  /* X does not match, so try its subexpressions.  */
2486
2487  fmt = GET_RTX_FORMAT (code);
2488  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2489    if (fmt[i] == 'e')
2490      {
2491	if (i == 0)
2492	  {
2493	    x = XEXP (x, 0);
2494	    goto repeat;
2495	  }
2496	else
2497	  if (refers_to_p (XEXP (x, i), y))
2498	    return 1;
2499      }
2500    else if (fmt[i] == 'E')
2501      {
2502	int j;
2503	for (j = 0; j < XVECLEN (x, i); j++)
2504	  if (refers_to_p (XVECEXP (x, i, j), y))
2505	    return 1;
2506      }
2507
2508  return 0;
2509}
2510
2511/* Given ADDR and SIZE (a memory address, and the size of the memory reference),
2512   set PBASE, PSTART, and PEND which correspond to the base of the address,
2513   the starting offset, and ending offset respectively.
2514   the starting offset, and the ending offset, respectively.
2515   ADDR is known to be a nonvarying address.  */
2516
2517/* ??? Despite what the comments say, this function is in fact frequently
2518   passed varying addresses.  This does not appear to cause any problems.  */
2519
2520static void
2521set_nonvarying_address_components (addr, size, pbase, pstart, pend)
2522     rtx addr;
2523     int size;
2524     rtx *pbase;
2525     HOST_WIDE_INT *pstart, *pend;
2526{
2527  rtx base;
2528  HOST_WIDE_INT start, end;
2529
2530  base = addr;
2531  start = 0;
2532  end = 0;
2533
2534  if (flag_pic && GET_CODE (base) == PLUS
2535      && XEXP (base, 0) == pic_offset_table_rtx)
2536    base = XEXP (base, 1);
2537
2538  /* Registers with nonvarying addresses usually have constant equivalents;
2539     but the frame pointer register is also possible.  */
2540  if (GET_CODE (base) == REG
2541      && qty_const != 0
2542      && REGNO_QTY_VALID_P (REGNO (base))
2543      && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
2544      && qty_const[REG_QTY (REGNO (base))] != 0)
2545    base = qty_const[REG_QTY (REGNO (base))];
2546  else if (GET_CODE (base) == PLUS
2547	   && GET_CODE (XEXP (base, 1)) == CONST_INT
2548	   && GET_CODE (XEXP (base, 0)) == REG
2549	   && qty_const != 0
2550	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2551	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2552	       == GET_MODE (XEXP (base, 0)))
2553	   && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
2554    {
2555      start = INTVAL (XEXP (base, 1));
2556      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2557    }
2558  /* This can happen as the result of virtual register instantiation,
2559     if the initial offset is too large to be a valid address.  */
2560  else if (GET_CODE (base) == PLUS
2561	   && GET_CODE (XEXP (base, 0)) == REG
2562	   && GET_CODE (XEXP (base, 1)) == REG
2563	   && qty_const != 0
2564	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
2565	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
2566	       == GET_MODE (XEXP (base, 0)))
2567	   && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
2568	   && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
2569	   && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
2570	       == GET_MODE (XEXP (base, 1)))
2571	   && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
2572    {
2573      rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
2574      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
2575
2576      /* One of the two values must be a constant.  */
2577      if (GET_CODE (base) != CONST_INT)
2578	{
2579	  if (GET_CODE (tem) != CONST_INT)
2580	    abort ();
2581	  start = INTVAL (tem);
2582	}
2583      else
2584	{
2585	  start = INTVAL (base);
2586	  base = tem;
2587	}
2588    }
2589
2590  /* Handle everything that we can find inside an address that has been
2591     viewed as constant.  */
2592
2593  while (1)
2594    {
2595      /* If no part of this switch does a "continue", the code outside
2596	 will exit this loop.  */
2597
2598      switch (GET_CODE (base))
2599	{
2600	case LO_SUM:
2601	  /* By definition, operand1 of a LO_SUM is the associated constant
2602	     address.  Use the associated constant address as the base
2603	     instead.  */
2604	  base = XEXP (base, 1);
2605	  continue;
2606
2607	case CONST:
2608	  /* Strip off CONST.  */
2609	  base = XEXP (base, 0);
2610	  continue;
2611
2612	case PLUS:
2613	  if (GET_CODE (XEXP (base, 1)) == CONST_INT)
2614	    {
2615	      start += INTVAL (XEXP (base, 1));
2616	      base = XEXP (base, 0);
2617	      continue;
2618	    }
2619	  break;
2620
2621	case AND:
2622	  /* Handle the case of an AND which is the negative of a power of
2623	     two.  This is used to represent unaligned memory operations.  */
2624	  if (GET_CODE (XEXP (base, 1)) == CONST_INT
2625	      && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
2626	    {
2627	      set_nonvarying_address_components (XEXP (base, 0), size,
2628						 pbase, pstart, pend);
2629
2630	      /* Assume the worst misalignment.  START is affected, but not
2631		 END, so compensate by adjusting SIZE.  Don't lose any
2632		 constant we already had.  */
2633
2634	      size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
2635	      start += *pstart + INTVAL (XEXP (base, 1)) + 1;
2636	      end += *pend;
2637	      base = *pbase;
2638	    }
2639	  break;
2640
2641	default:
2642	  break;
2643	}
2644
2645      break;
2646    }
2647
2648  if (GET_CODE (base) == CONST_INT)
2649    {
2650      start += INTVAL (base);
2651      base = const0_rtx;
2652    }
2653
2654  end = start + size;
2655
2656  /* Set the return values.  */
2657  *pbase = base;
2658  *pstart = start;
2659  *pend = end;
2660}
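
/* Worked instance (hypothetical address): for
   ADDR = (plus (reg fp) (const_int -16)) and SIZE = 4, the PLUS case peels
   off the offset, giving *PBASE = (reg fp), *PSTART = -16, *PEND = -12.
   Two references with the same base whose [start, end) ranges are disjoint
   can then be compared for overlap and treated as non-conflicting.  */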
2661
2662/* Return 1 if X has a value that can vary even between two
2663   executions of the program.  0 means X can be compared reliably
2664   against certain constants or near-constants.  */
2665
2666static int
2667cse_rtx_varies_p (x)
2668     register rtx x;
2669{
2670  /* We need not check for X and the equivalence class being of the same
2671     mode because if X is equivalent to a constant in some mode, it
2672     doesn't vary in any mode.  */
2673
2674  if (GET_CODE (x) == REG
2675      && REGNO_QTY_VALID_P (REGNO (x))
2676      && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
2677      && qty_const[REG_QTY (REGNO (x))] != 0)
2678    return 0;
2679
2680  if (GET_CODE (x) == PLUS
2681      && GET_CODE (XEXP (x, 1)) == CONST_INT
2682      && GET_CODE (XEXP (x, 0)) == REG
2683      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2684      && (GET_MODE (XEXP (x, 0))
2685	  == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2686      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
2687    return 0;
2688
2689  /* This can happen as the result of virtual register instantiation, if
2690     the initial constant is too large to be a valid address.  This gives
2691     us a three-instruction sequence: load the large offset into a register,
2692     load fp minus a constant into a register, then a MEM which is the
2693     sum of the two `constant' registers.  */
2694  if (GET_CODE (x) == PLUS
2695      && GET_CODE (XEXP (x, 0)) == REG
2696      && GET_CODE (XEXP (x, 1)) == REG
2697      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2698      && (GET_MODE (XEXP (x, 0))
2699	  == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
2700      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
2701      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
2702      && (GET_MODE (XEXP (x, 1))
2703	  == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
2704      && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
2705    return 0;
2706
2707  return rtx_varies_p (x);
2708}
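
/* For illustration: a (reg 100) whose quantity is known equal to
   (symbol_ref "s") does not vary, so a MEM at that address can be checked
   against other fixed addresses; a plain (reg 101) with no recorded
   constant falls through to rtx_varies_p.  */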
2709
2710/* Canonicalize an expression:
2711   replace each register reference inside it
2712   with the "oldest" equivalent register.
2713
2714   If INSN is non-zero and we are replacing a pseudo with a hard register
2715   or vice versa, validate_change is used to ensure that INSN remains valid
2716   after we make our substitution.  The calls are made with IN_GROUP non-zero
2717   so apply_change_group must be called upon the outermost return from this
2718   function (unless INSN is zero).  The result of apply_change_group can
2719   generally be discarded since the changes we are making are optional.  */
2720
2721static rtx
2722canon_reg (x, insn)
2723     rtx x;
2724     rtx insn;
2725{
2726  register int i;
2727  register enum rtx_code code;
2728  register char *fmt;
2729
2730  if (x == 0)
2731    return x;
2732
2733  code = GET_CODE (x);
2734  switch (code)
2735    {
2736    case PC:
2737    case CC0:
2738    case CONST:
2739    case CONST_INT:
2740    case CONST_DOUBLE:
2741    case SYMBOL_REF:
2742    case LABEL_REF:
2743    case ADDR_VEC:
2744    case ADDR_DIFF_VEC:
2745      return x;
2746
2747    case REG:
2748      {
2749	register int first;
2750
2751	/* Never replace a hard reg, because hard regs can appear
2752	   in more than one machine mode, and we must preserve the mode
2753	   of each occurrence.  Also, some hard regs appear in
2754	   MEMs that are shared and mustn't be altered.  Don't try to
2755	   replace any reg that maps to a reg of class NO_REGS.  */
2756	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2757	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2758	  return x;
2759
2760	first = qty_first_reg[REG_QTY (REGNO (x))];
2761	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2762		: REGNO_REG_CLASS (first) == NO_REGS ? x
2763		: gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
2764      }
2765
2766    default:
2767      break;
2768    }
2769
2770  fmt = GET_RTX_FORMAT (code);
2771  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2772    {
2773      register int j;
2774
2775      if (fmt[i] == 'e')
2776	{
2777	  rtx new = canon_reg (XEXP (x, i), insn);
2778	  int insn_code;
2779
2780	  /* If replacing pseudo with hard reg or vice versa, ensure the
2781	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2782	  if (insn != 0 && new != 0
2783	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2784	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2785		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2786		  || (insn_code = recog_memoized (insn)) < 0
2787		  || insn_n_dups[insn_code] > 0))
2788	    validate_change (insn, &XEXP (x, i), new, 1);
2789	  else
2790	    XEXP (x, i) = new;
2791	}
2792      else if (fmt[i] == 'E')
2793	for (j = 0; j < XVECLEN (x, i); j++)
2794	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2795    }
2796
2797  return x;
2798}
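
/* Usage sketch (REG105 names a hypothetical pseudo): after
   (set (reg 105) (reg 100)) the two pseudos share a quantity whose oldest
   register is (reg 100), so

     canon_reg (gen_rtx_PLUS (SImode, reg105, GEN_INT (4)), insn);

   rewrites the operand to (plus:SI (reg 100) (const_int 4)).  A caller
   passing a nonzero INSN must still call apply_change_group ().  */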
2799
2800/* LOC is a location within INSN that is an operand address (the contents of
2801   a MEM).  Find the best equivalent address to use that is valid for this
2802   insn.
2803
2804   On most CISC machines, complicated address modes are costly, and rtx_cost
2805   is a good approximation for that cost.  However, most RISC machines have
2806   only a few (usually only one) memory reference formats.  If an address is
2807   valid at all, it is often just as cheap as any other address.  Hence, for
2808   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2809   costs of various addresses.  For two addresses of equal cost, choose the one
2810   with the highest `rtx_cost' value as that has the potential of eliminating
2811   the most insns.  For equal costs, we choose the first in the equivalence
2812   class.  Note that we ignore the fact that pseudo registers are cheaper
2813   than hard registers here, because we would also prefer the pseudo
2814   registers.  */
2815
2816static void
2817find_best_addr (insn, loc)
2818     rtx insn;
2819     rtx *loc;
2820{
2821  struct table_elt *elt;
2822  rtx addr = *loc;
2823#ifdef ADDRESS_COST
2824  struct table_elt *p;
2825  int found_better = 1;
2826#endif
2827  int save_do_not_record = do_not_record;
2828  int save_hash_arg_in_memory = hash_arg_in_memory;
2829  int save_hash_arg_in_struct = hash_arg_in_struct;
2830  int addr_volatile;
2831  int regno;
2832  unsigned hash;
2833
2834  /* Do not try to replace constant addresses or addresses of local and
2835     argument slots.  These MEM expressions are made only once and inserted
2836     in many instructions, as well as being used to control symbol table
2837     output.  It is not safe to clobber them.
2838
2839     There are some uncommon cases where the address is already in a register
2840     for some reason, but we cannot take advantage of that because we have
2841     no easy way to unshare the MEM.  In addition, looking up all stack
2842     addresses is costly.  */
2843  if ((GET_CODE (addr) == PLUS
2844       && GET_CODE (XEXP (addr, 0)) == REG
2845       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2846       && (regno = REGNO (XEXP (addr, 0)),
2847	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2848	   || regno == ARG_POINTER_REGNUM))
2849      || (GET_CODE (addr) == REG
2850	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2851	      || regno == HARD_FRAME_POINTER_REGNUM
2852	      || regno == ARG_POINTER_REGNUM))
2853      || GET_CODE (addr) == ADDRESSOF
2854      || CONSTANT_ADDRESS_P (addr))
2855    return;
2856
2857  /* If this address is not simply a register, try to fold it.  This will
2858     sometimes simplify the expression.  Many simplifications
2859     will not be valid, but some, usually applying the associative rule, will
2860     be valid and produce better code.  */
2861  if (GET_CODE (addr) != REG)
2862    {
2863      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2864
2865      if (1
2866#ifdef ADDRESS_COST
2867	  && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2868	      || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2869		  && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2870#else
2871	  && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2872#endif
2873	  && validate_change (insn, loc, folded, 0))
2874	addr = folded;
2875    }
2876
2877  /* If this address is not in the hash table, we can't look for equivalences
2878     of the whole address.  Also, ignore if volatile.  */
2879
2880  do_not_record = 0;
2881  hash = HASH (addr, Pmode);
2882  addr_volatile = do_not_record;
2883  do_not_record = save_do_not_record;
2884  hash_arg_in_memory = save_hash_arg_in_memory;
2885  hash_arg_in_struct = save_hash_arg_in_struct;
2886
2887  if (addr_volatile)
2888    return;
2889
2890  elt = lookup (addr, hash, Pmode);
2891
2892#ifndef ADDRESS_COST
2893  if (elt)
2894    {
2895      int our_cost = elt->cost;
2896
2897      /* Find the lowest cost below ours that works.  */
2898      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2899	if (elt->cost < our_cost
2900	    && (GET_CODE (elt->exp) == REG
2901		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2902	    && validate_change (insn, loc,
2903				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2904	  return;
2905    }
2906#else
2907
2908  if (elt)
2909    {
2910      /* We need to find the best (under the criteria documented above) entry
2911	 in the class that is valid.  We use the `flag' field to indicate
2912	 choices that were invalid and iterate until we can't find a better
2913	 one that hasn't already been tried.  */
2914
2915      for (p = elt->first_same_value; p; p = p->next_same_value)
2916	p->flag = 0;
2917
2918      while (found_better)
2919	{
2920	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2921	  int best_rtx_cost = (elt->cost + 1) >> 1;
2922	  struct table_elt *best_elt = elt;
2923
2924	  found_better = 0;
2925	  for (p = elt->first_same_value; p; p = p->next_same_value)
2926	    if (! p->flag)
2927	      {
2928		if ((GET_CODE (p->exp) == REG
2929		     || exp_equiv_p (p->exp, p->exp, 1, 0))
2930		    && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2931			|| (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2932			    && (p->cost + 1) >> 1 > best_rtx_cost)))
2933		  {
2934		    found_better = 1;
2935		    best_addr_cost = CSE_ADDRESS_COST (p->exp);
2936		    best_rtx_cost = (p->cost + 1) >> 1;
2937		    best_elt = p;
2938		  }
2939	      }
2940
2941	  if (found_better)
2942	    {
2943	      if (validate_change (insn, loc,
2944				   canon_reg (copy_rtx (best_elt->exp),
2945					      NULL_RTX), 0))
2946		return;
2947	      else
2948		best_elt->flag = 1;
2949	    }
2950	}
2951    }
2952
2953  /* If the address is a binary operation with the first operand a register
2954     and the second a constant, do the same as above, but looking for
2955     equivalences of the register.  Then try to simplify before checking for
2956     the best address to use.  This catches a few cases:  First is when we
2957     have REG+const and the register is another REG+const.  We can often merge
2958     the constants and eliminate one insn and one register.  It may also be
2959     that a machine has a cheap REG+REG+const.  Finally, this improves the
2960     code on the Alpha for unaligned byte stores.  */
2961
2962  if (flag_expensive_optimizations
2963      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2964	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2965      && GET_CODE (XEXP (*loc, 0)) == REG
2966      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2967    {
2968      rtx c = XEXP (*loc, 1);
2969
2970      do_not_record = 0;
2971      hash = HASH (XEXP (*loc, 0), Pmode);
2972      do_not_record = save_do_not_record;
2973      hash_arg_in_memory = save_hash_arg_in_memory;
2974      hash_arg_in_struct = save_hash_arg_in_struct;
2975
2976      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2977      if (elt == 0)
2978	return;
2979
2980      /* We need to find the best (under the criteria documented above) entry
2981	 in the class that is valid.  We use the `flag' field to indicate
2982	 choices that were invalid and iterate until we can't find a better
2983	 one that hasn't already been tried.  */
2984
2985      for (p = elt->first_same_value; p; p = p->next_same_value)
2986	p->flag = 0;
2987
2988      while (found_better)
2989	{
2990	  int best_addr_cost = CSE_ADDRESS_COST (*loc);
2991	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
2992	  struct table_elt *best_elt = elt;
2993	  rtx best_rtx = *loc;
2994	  int count;
2995
2996	  /* This is in the worst case an O(n^2) algorithm, so limit our search
2997	     to the first 32 elements on the list.  This avoids trouble
2998	     compiling code with very long basic blocks that can easily
2999	     call cse_gen_binary so many times that we run out of memory.  */
3000
3001	  found_better = 0;
3002	  for (p = elt->first_same_value, count = 0;
3003	       p && count < 32;
3004	       p = p->next_same_value, count++)
3005	    if (! p->flag
3006		&& (GET_CODE (p->exp) == REG
3007		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3008	      {
3009		rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);
3010
3011		if ((CSE_ADDRESS_COST (new) < best_addr_cost
3012		    || (CSE_ADDRESS_COST (new) == best_addr_cost
3013			&& (COST (new) + 1) >> 1 > best_rtx_cost)))
3014		  {
3015		    found_better = 1;
3016		    best_addr_cost = CSE_ADDRESS_COST (new);
3017		    best_rtx_cost = (COST (new) + 1) >> 1;
3018		    best_elt = p;
3019		    best_rtx = new;
3020		  }
3021	      }
3022
3023	  if (found_better)
3024	    {
3025	      if (validate_change (insn, loc,
3026				   canon_reg (copy_rtx (best_rtx),
3027					      NULL_RTX), 0))
3028		return;
3029	      else
3030		best_elt->flag = 1;
3031	    }
3032	}
3033    }
3034#endif
3035}
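
/* Example of the REG+const case above (hypothetical equivalence): if *LOC
   is (plus (reg 100) (const_int 4)) and (reg 100) is known equivalent to
   (plus (reg sp) (const_int 8)), cse_gen_binary can fold the sum to
   (plus (reg sp) (const_int 12)); when that address is valid and no more
   costly, validate_change installs it and the copy into (reg 100) may
   become dead.  */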
3036
3037/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3038/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
3039   operation (EQ, NE, GT, etc.), follow it back through the hash table to
3040   find what values are actually being compared.
3041   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3042   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3043   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3044   compared to produce cc0.
3045
3046   The return value is the comparison operator: either CODE itself or the
3047   code corresponding to the inverse of the comparison.  */
3048
3049static enum rtx_code
3050find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3051     enum rtx_code code;
3052     rtx *parg1, *parg2;
3053     enum machine_mode *pmode1, *pmode2;
3054{
3055  rtx arg1, arg2;
3056
3057  arg1 = *parg1, arg2 = *parg2;
3058
3059  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3060
3061  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3062    {
3063      /* Set non-zero when we find something of interest.  */
3064      rtx x = 0;
3065      int reverse_code = 0;
3066      struct table_elt *p = 0;
3067
3068      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3069	 On machines with CC0, this is the only case that can occur, since
3070	 fold_rtx will return the COMPARE or item being compared with zero
3071	 when given CC0.  */
3072
3073      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3074	x = arg1;
3075
3076      /* If ARG1 is a comparison operator and CODE is testing for
3077	 STORE_FLAG_VALUE, get the inner arguments.  */
3078
3079      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3080	{
3081	  if (code == NE
3082	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3083		  && code == LT && STORE_FLAG_VALUE == -1)
3084#ifdef FLOAT_STORE_FLAG_VALUE
3085	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3086		  && FLOAT_STORE_FLAG_VALUE < 0)
3087#endif
3088	      )
3089	    x = arg1;
3090	  else if (code == EQ
3091		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3092		       && code == GE && STORE_FLAG_VALUE == -1)
3093#ifdef FLOAT_STORE_FLAG_VALUE
3094		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3095		       && FLOAT_STORE_FLAG_VALUE < 0)
3096#endif
3097		   )
3098	    x = arg1, reverse_code = 1;
3099	}
3100
3101      /* ??? We could also check for
3102
3103	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3104
3105	 and related forms, but let's wait until we see them occurring.  */
3106
3107      if (x == 0)
3108	/* Look up ARG1 in the hash table and see if it has an equivalence
3109	   that lets us see what is being compared.  */
3110	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
3111		    GET_MODE (arg1));
3112      if (p) p = p->first_same_value;
3113
3114      for (; p; p = p->next_same_value)
3115	{
3116	  enum machine_mode inner_mode = GET_MODE (p->exp);
3117
3118	  /* If the entry isn't valid, skip it.  */
3119	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3120	    continue;
3121
3122	  if (GET_CODE (p->exp) == COMPARE
3123	      /* Another possibility is that this machine has a compare insn
3124		 that includes the comparison code.  In that case, ARG1 would
3125		 be equivalent to a comparison operation that would set ARG1 to
3126		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3127		 CODE is the actual comparison being done; if it is an EQ,
3128		 we must reverse CODE.  On machines with a negative value
3129		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3130	      || ((code == NE
3131		   || (code == LT
3132		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3133		       && (GET_MODE_BITSIZE (inner_mode)
3134			   <= HOST_BITS_PER_WIDE_INT)
3135		       && (STORE_FLAG_VALUE
3136			   & ((HOST_WIDE_INT) 1
3137			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3138#ifdef FLOAT_STORE_FLAG_VALUE
3139		   || (code == LT
3140		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3141		       && FLOAT_STORE_FLAG_VALUE < 0)
3142#endif
3143		   )
3144		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3145	    {
3146	      x = p->exp;
3147	      break;
3148	    }
3149	  else if ((code == EQ
3150		    || (code == GE
3151			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3152			&& (GET_MODE_BITSIZE (inner_mode)
3153			    <= HOST_BITS_PER_WIDE_INT)
3154			&& (STORE_FLAG_VALUE
3155			    & ((HOST_WIDE_INT) 1
3156			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3157#ifdef FLOAT_STORE_FLAG_VALUE
3158		    || (code == GE
3159			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3160			&& FLOAT_STORE_FLAG_VALUE < 0)
3161#endif
3162		    )
3163		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3164	    {
3165	      reverse_code = 1;
3166	      x = p->exp;
3167	      break;
3168	    }
3169
3170	  /* If this is fp + constant, the equivalent is a better operand since
3171	     it may let us predict the value of the comparison.  */
3172	  else if (NONZERO_BASE_PLUS_P (p->exp))
3173	    {
3174	      arg1 = p->exp;
3175	      continue;
3176	    }
3177	}
3178
3179      /* If we didn't find a useful equivalence for ARG1, we are done.
3180	 Otherwise, set up for the next iteration.  */
3181      if (x == 0)
3182	break;
3183
3184      arg1 = XEXP (x, 0),  arg2 = XEXP (x, 1);
3185      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3186	code = GET_CODE (x);
3187
3188      if (reverse_code)
3189	code = reverse_condition (code);
3190    }
3191
3192  /* Return our results.  Return the modes from before fold_rtx
3193     because fold_rtx might produce const_int, and then it's too late.  */
3194  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3195  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3196
3197  return code;
3198}
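
/* Worked instance (hypothetical equivalences): given CODE = NE,
   *PARG1 = (reg 100) and *PARG2 = (const_int 0), where (reg 100) is known
   equivalent to (compare (reg 101) (const_int 5)), the loop rewrites the
   arguments so *PARG1 = (reg 101) and *PARG2 = (const_int 5), returning NE;
   had (reg 100) been equivalent to (eq (reg 101) (const_int 5)) instead,
   the returned code would be EQ.  */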
3199
3200/* Try to simplify a unary operation CODE whose output mode is to be
3201   MODE with input operand OP whose mode was originally OP_MODE.
3202   Return zero if no simplification can be made.  */
3203
3204rtx
3205simplify_unary_operation (code, mode, op, op_mode)
3206     enum rtx_code code;
3207     enum machine_mode mode;
3208     rtx op;
3209     enum machine_mode op_mode;
3210{
3211  register int width = GET_MODE_BITSIZE (mode);
3212
3213  /* The order of these tests is critical so that, for example, we don't
3214     check the wrong mode (input vs. output) for a conversion operation,
3215     such as FIX.  At some point, this should be simplified.  */
3216
3217#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)
3218
3219  if (code == FLOAT && GET_MODE (op) == VOIDmode
3220      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3221    {
3222      HOST_WIDE_INT hv, lv;
3223      REAL_VALUE_TYPE d;
3224
3225      if (GET_CODE (op) == CONST_INT)
3226	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3227      else
3228	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
3229
3230#ifdef REAL_ARITHMETIC
3231      REAL_VALUE_FROM_INT (d, lv, hv, mode);
3232#else
3233      if (hv < 0)
3234	{
3235	  d = (double) (~ hv);
3236	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3237		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3238	  d += (double) (unsigned HOST_WIDE_INT) (~ lv);
3239	  d = (- d - 1.0);
3240	}
3241      else
3242	{
3243	  d = (double) hv;
3244	  d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3245		* (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3246	  d += (double) (unsigned HOST_WIDE_INT) lv;
3247	}
3248#endif  /* REAL_ARITHMETIC */
3249      d = real_value_truncate (mode, d);
3250      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3251    }
3252  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
3253	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3254    {
3255      HOST_WIDE_INT hv, lv;
3256      REAL_VALUE_TYPE d;
3257
3258      if (GET_CODE (op) == CONST_INT)
3259	lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
3260      else
3261	lv = CONST_DOUBLE_LOW (op),  hv = CONST_DOUBLE_HIGH (op);
3262
3263      if (op_mode == VOIDmode)
3264	{
3265	  /* We don't know how to interpret negative-looking numbers in
3266	     this case, so don't try to fold those.  */
3267	  if (hv < 0)
3268	    return 0;
3269	}
3270      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
3271	;
3272      else
3273	hv = 0, lv &= GET_MODE_MASK (op_mode);
3274
3275#ifdef REAL_ARITHMETIC
3276      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
3277#else
3278
3279      d = (double) (unsigned HOST_WIDE_INT) hv;
3280      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
3281	    * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
3282      d += (double) (unsigned HOST_WIDE_INT) lv;
3283#endif  /* REAL_ARITHMETIC */
3284      d = real_value_truncate (mode, d);
3285      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3286    }
3287#endif
3288
3289  if (GET_CODE (op) == CONST_INT
3290      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3291    {
3292      register HOST_WIDE_INT arg0 = INTVAL (op);
3293      register HOST_WIDE_INT val;
3294
3295      switch (code)
3296	{
3297	case NOT:
3298	  val = ~ arg0;
3299	  break;
3300
3301	case NEG:
3302	  val = - arg0;
3303	  break;
3304
3305	case ABS:
3306	  val = (arg0 >= 0 ? arg0 : - arg0);
3307	  break;
3308
3309	case FFS:
3310	  /* Don't use ffs here.  Instead, get low order bit and then its
3311	     number.  If arg0 is zero, this will return 0, as desired.  */
3312	  arg0 &= GET_MODE_MASK (mode);
3313	  val = exact_log2 (arg0 & (- arg0)) + 1;
3314	  break;
3315
3316	case TRUNCATE:
3317	  val = arg0;
3318	  break;
3319
3320	case ZERO_EXTEND:
3321	  if (op_mode == VOIDmode)
3322	    op_mode = mode;
3323	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3324	    {
3325	      /* If we were really extending the mode,
3326		 we would have to distinguish between zero-extension
3327		 and sign-extension.  */
3328	      if (width != GET_MODE_BITSIZE (op_mode))
3329		abort ();
3330	      val = arg0;
3331	    }
3332	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3333	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3334	  else
3335	    return 0;
3336	  break;
3337
3338	case SIGN_EXTEND:
3339	  if (op_mode == VOIDmode)
3340	    op_mode = mode;
3341	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3342	    {
3343	      /* If we were really extending the mode,
3344		 we would have to distinguish between zero-extension
3345		 and sign-extension.  */
3346	      if (width != GET_MODE_BITSIZE (op_mode))
3347		abort ();
3348	      val = arg0;
3349	    }
3350	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3351	    {
3352	      val
3353		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3354	      if (val
3355		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3356		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
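	      /* For example, with an 8 bit op_mode and arg0 == 0x80:
		 val is first masked to 0x80; its sign bit is set, so
		 0x100 is subtracted, yielding -128.  */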
3357	    }
3358	  else
3359	    return 0;
3360	  break;
3361
3362	case SQRT:
3363	  return 0;
3364
3365	default:
3366	  abort ();
3367	}
3368
3369      /* Clear the bits that don't belong in our mode,
3370	 unless they and our sign bit are all one.
3371	 So we get either a reasonable negative value or a reasonable
3372	 unsigned value for this mode.  */
3373      if (width < HOST_BITS_PER_WIDE_INT
3374	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3375	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3376	val &= ((HOST_WIDE_INT) 1 << width) - 1;
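
      /* For example, for width == 8: val == -1 is left alone (its high
	 bits all match the sign bit), while val == 0x1fe is masked
	 down to 0xfe.  */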
3377
3378      /* If this would be an entire word for the target, but is not for
3379	 the host, then sign-extend on the host so that the number will look
3380	 the same way on the host that it would on the target.
3381
3382	 For example, when building a 64 bit alpha hosted 32 bit sparc
3383	 targeted compiler, we want the 32 bit unsigned value -1 to be
3384	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3385	 The latter confuses the sparc backend.  */
3386
3387      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3388	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3389	val |= ((HOST_WIDE_INT) (-1) << width);
3390
3391      return GEN_INT (val);
3392    }
3393
3394  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
3395     for a DImode operation on a CONST_INT.  */
3396  else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_WIDE_INT * 2
3397	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3398    {
3399      HOST_WIDE_INT l1, h1, lv, hv;
3400
3401      if (GET_CODE (op) == CONST_DOUBLE)
3402	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3403      else
3404	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3405
3406      switch (code)
3407	{
3408	case NOT:
3409	  lv = ~ l1;
3410	  hv = ~ h1;
3411	  break;
3412
3413	case NEG:
3414	  neg_double (l1, h1, &lv, &hv);
3415	  break;
3416
3417	case ABS:
3418	  if (h1 < 0)
3419	    neg_double (l1, h1, &lv, &hv);
3420	  else
3421	    lv = l1, hv = h1;
3422	  break;
3423
3424	case FFS:
3425	  hv = 0;
3426	  if (l1 != 0)
3427	    lv = exact_log2 (l1 & (-l1)) + 1;
3428	  else if (h1 != 0)
3429	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
3430	  else
3431	    /* As in the single-word case, FFS of zero is zero.  */
3432	    lv = 0;
3430	  break;
3431
3432	case TRUNCATE:
3433	  /* This is just a change-of-mode, so do nothing.  */
3434	  lv = l1, hv = h1;
3435	  break;
3436
3437	case ZERO_EXTEND:
3438	  if (op_mode == VOIDmode
3439	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3440	    return 0;
3441
3442	  hv = 0;
3443	  lv = l1 & GET_MODE_MASK (op_mode);
3444	  break;
3445
3446	case SIGN_EXTEND:
3447	  if (op_mode == VOIDmode
3448	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3449	    return 0;
3450	  else
3451	    {
3452	      lv = l1 & GET_MODE_MASK (op_mode);
3453	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3454		  && (lv & ((HOST_WIDE_INT) 1
3455			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3456		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3457
3458	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3459	    }
3460	  break;
3461
3462	case SQRT:
3463	  return 0;
3464
3465	default:
3466	  return 0;
3467	}
3468
3469      return immed_double_const (lv, hv, mode);
3470    }
3471
3472#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3473  else if (GET_CODE (op) == CONST_DOUBLE
3474	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
3475    {
3476      REAL_VALUE_TYPE d;
3477      jmp_buf handler;
3478      rtx x;
3479
3480      if (setjmp (handler))
3481	/* There used to be a warning here, but that is inadvisable.
3482	   People may want to cause traps, and the natural way
3483	   to do it should not get a warning.  */
3484	return 0;
3485
3486      set_float_handler (handler);
3487
3488      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3489
3490      switch (code)
3491	{
3492	case NEG:
3493	  d = REAL_VALUE_NEGATE (d);
3494	  break;
3495
3496	case ABS:
3497	  if (REAL_VALUE_NEGATIVE (d))
3498	    d = REAL_VALUE_NEGATE (d);
3499	  break;
3500
3501	case FLOAT_TRUNCATE:
3502	  d = real_value_truncate (mode, d);
3503	  break;
3504
3505	case FLOAT_EXTEND:
3506	  /* All this does is change the mode.  */
3507	  break;
3508
3509	case FIX:
3510	  d = REAL_VALUE_RNDZINT (d);
3511	  break;
3512
3513	case UNSIGNED_FIX:
3514	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3515	  break;
3516
3517	case SQRT:
3518	  return 0;
3519
3520	default:
3521	  abort ();
3522	}
3523
3524      x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3525      set_float_handler (NULL_PTR);
3526      return x;
3527    }
3528
3529  else if (GET_CODE (op) == CONST_DOUBLE
3530	   && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3531	   && GET_MODE_CLASS (mode) == MODE_INT
3532	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3533    {
3534      REAL_VALUE_TYPE d;
3535      jmp_buf handler;
3536      HOST_WIDE_INT val;
3537
3538      if (setjmp (handler))
3539	return 0;
3540
3541      set_float_handler (handler);
3542
3543      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3544
3545      switch (code)
3546	{
3547	case FIX:
3548	  val = REAL_VALUE_FIX (d);
3549	  break;
3550
3551	case UNSIGNED_FIX:
3552	  val = REAL_VALUE_UNSIGNED_FIX (d);
3553	  break;
3554
3555	default:
3556	  abort ();
3557	}
3558
3559      set_float_handler (NULL_PTR);
3560
3561      /* Clear the bits that don't belong in our mode,
3562	 unless they and our sign bit are all one.
3563	 So we get either a reasonable negative value or a reasonable
3564	 unsigned value for this mode.  */
3565      if (width < HOST_BITS_PER_WIDE_INT
3566	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3567	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3568	val &= ((HOST_WIDE_INT) 1 << width) - 1;
3569
3570      /* If this would be an entire word for the target, but is not for
3571	 the host, then sign-extend on the host so that the number will look
3572	 the same way on the host that it would on the target.
3573
3574	 For example, when building a 64 bit alpha hosted 32 bit sparc
3575	 targeted compiler, we want the 32 bit unsigned value -1 to be
3576	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
3577	 The latter confuses the sparc backend.  */
3578
3579      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3580	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3581	val |= ((HOST_WIDE_INT) (-1) << width);
3582
3583      return GEN_INT (val);
3584    }
3585#endif
3586  /* This was formerly used only for non-IEEE float.
3587     eggert@twinsun.com says it is safe for IEEE also.  */
3588  else
3589    {
3590      /* There are some simplifications we can do even if the operands
3591	 aren't constant.  */
3592      switch (code)
3593	{
3594	case NEG:
3595	case NOT:
3596	  /* (not (not X)) == X, similarly for NEG.  */
3597	  if (GET_CODE (op) == code)
3598	    return XEXP (op, 0);
3599	  break;
3600
3601	case SIGN_EXTEND:
3602	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3603	     becomes just the MINUS if its mode is MODE.  This allows
3604	     folding switch statements on machines using casesi (such as
3605	     the Vax).  */
3606	  if (GET_CODE (op) == TRUNCATE
3607	      && GET_MODE (XEXP (op, 0)) == mode
3608	      && GET_CODE (XEXP (op, 0)) == MINUS
3609	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3610	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3611	    return XEXP (op, 0);
3612
3613#ifdef POINTERS_EXTEND_UNSIGNED
3614	  if (! POINTERS_EXTEND_UNSIGNED
3615	      && mode == Pmode && GET_MODE (op) == ptr_mode
3616	      && CONSTANT_P (op))
3617	    return convert_memory_address (Pmode, op);
3618#endif
3619	  break;
3620
3621#ifdef POINTERS_EXTEND_UNSIGNED
3622	case ZERO_EXTEND:
3623	  if (POINTERS_EXTEND_UNSIGNED
3624	      && mode == Pmode && GET_MODE (op) == ptr_mode
3625	      && CONSTANT_P (op))
3626	    return convert_memory_address (Pmode, op);
3627	  break;
3628#endif
3629
3630	default:
3631	  break;
3632	}
3633
3634      return 0;
3635    }
3636}
3637
3638/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3639   and OP1.  Return 0 if no simplification is possible.
3640
3641   Don't use this for relational operations such as EQ or LT.
3642   Use simplify_relational_operation instead.  */
3643
3644rtx
3645simplify_binary_operation (code, mode, op0, op1)
3646     enum rtx_code code;
3647     enum machine_mode mode;
3648     rtx op0, op1;
3649{
3650  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3651  HOST_WIDE_INT val;
3652  int width = GET_MODE_BITSIZE (mode);
3653  rtx tem;
3654
3655  /* Relational operations don't work here.  We must know the mode
3656     of the operands in order to do the comparison correctly.
3657     Assuming a full word can give incorrect results.
3658     Consider comparing 128 with -128 in QImode.  */
3659
3660  if (GET_RTX_CLASS (code) == '<')
3661    abort ();
3662
3663#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3664  if (GET_MODE_CLASS (mode) == MODE_FLOAT
3665      && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3666      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3667    {
3668      REAL_VALUE_TYPE f0, f1, value;
3669      jmp_buf handler;
3670
3671      if (setjmp (handler))
3672	return 0;
3673
3674      set_float_handler (handler);
3675
3676      REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3677      REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3678      f0 = real_value_truncate (mode, f0);
3679      f1 = real_value_truncate (mode, f1);
3680
3681#ifdef REAL_ARITHMETIC
3682#ifndef REAL_INFINITY
3683      if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3684	return 0;
3685#endif
3686      REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3687#else
3688      switch (code)
3689	{
3690	case PLUS:
3691	  value = f0 + f1;
3692	  break;
3693	case MINUS:
3694	  value = f0 - f1;
3695	  break;
3696	case MULT:
3697	  value = f0 * f1;
3698	  break;
3699	case DIV:
3700#ifndef REAL_INFINITY
3701	  if (f1 == 0)
3702	    return 0;
3703#endif
3704	  value = f0 / f1;
3705	  break;
3706	case SMIN:
3707	  value = MIN (f0, f1);
3708	  break;
3709	case SMAX:
3710	  value = MAX (f0, f1);
3711	  break;
3712	default:
3713	  abort ();
3714	}
3715#endif
3716
3717      value = real_value_truncate (mode, value);
3718      set_float_handler (NULL_PTR);
3719      return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3720    }
3721#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3722
3723  /* We can fold some multi-word operations.  */
3724  if (GET_MODE_CLASS (mode) == MODE_INT
3725      && width == HOST_BITS_PER_WIDE_INT * 2
3726      && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3727      && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3728    {
3729      HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3730
3731      if (GET_CODE (op0) == CONST_DOUBLE)
3732	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3733      else
3734	l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3735
3736      if (GET_CODE (op1) == CONST_DOUBLE)
3737	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3738      else
3739	l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3740
3741      switch (code)
3742	{
3743	case MINUS:
3744	  /* A - B == A + (-B).  */
3745	  neg_double (l2, h2, &lv, &hv);
3746	  l2 = lv, h2 = hv;
3747
3748	  /* ... fall through ...  */
3749
3750	case PLUS:
3751	  add_double (l1, h1, l2, h2, &lv, &hv);
3752	  break;
3753
3754	case MULT:
3755	  mul_double (l1, h1, l2, h2, &lv, &hv);
3756	  break;
3757
3758	case DIV:  case MOD:   case UDIV:  case UMOD:
3759	  /* We'd need to include tree.h to do this and it doesn't seem worth
3760	     it.  */
3761	  return 0;
3762
3763	case AND:
3764	  lv = l1 & l2, hv = h1 & h2;
3765	  break;
3766
3767	case IOR:
3768	  lv = l1 | l2, hv = h1 | h2;
3769	  break;
3770
3771	case XOR:
3772	  lv = l1 ^ l2, hv = h1 ^ h2;
3773	  break;
3774
3775	case SMIN:
3776	  if (h1 < h2
3777	      || (h1 == h2
3778		  && ((unsigned HOST_WIDE_INT) l1
3779		      < (unsigned HOST_WIDE_INT) l2)))
3780	    lv = l1, hv = h1;
3781	  else
3782	    lv = l2, hv = h2;
3783	  break;
3784
3785	case SMAX:
3786	  if (h1 > h2
3787	      || (h1 == h2
3788		  && ((unsigned HOST_WIDE_INT) l1
3789		      > (unsigned HOST_WIDE_INT) l2)))
3790	    lv = l1, hv = h1;
3791	  else
3792	    lv = l2, hv = h2;
3793	  break;
3794
3795	case UMIN:
3796	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3797	      || (h1 == h2
3798		  && ((unsigned HOST_WIDE_INT) l1
3799		      < (unsigned HOST_WIDE_INT) l2)))
3800	    lv = l1, hv = h1;
3801	  else
3802	    lv = l2, hv = h2;
3803	  break;
3804
3805	case UMAX:
3806	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3807	      || (h1 == h2
3808		  && ((unsigned HOST_WIDE_INT) l1
3809		      > (unsigned HOST_WIDE_INT) l2)))
3810	    lv = l1, hv = h1;
3811	  else
3812	    lv = l2, hv = h2;
3813	  break;
3814
3815	case LSHIFTRT:   case ASHIFTRT:
3816	case ASHIFT:
3817	case ROTATE:     case ROTATERT:
3818#ifdef SHIFT_COUNT_TRUNCATED
3819	  if (SHIFT_COUNT_TRUNCATED)
3820	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3821#endif
3822
3823	  if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3824	    return 0;
3825
3826	  if (code == LSHIFTRT || code == ASHIFTRT)
3827	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3828			   code == ASHIFTRT);
3829	  else if (code == ASHIFT)
3830	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3831	  else if (code == ROTATE)
3832	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3833	  else /* code == ROTATERT */
3834	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3835	  break;
3836
3837	default:
3838	  return 0;
3839	}
3840
3841      return immed_double_const (lv, hv, mode);
3842    }
3843
3844  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3845      || width > HOST_BITS_PER_WIDE_INT || width == 0)
3846    {
3847      /* Even if we can't compute a constant result,
3848	 there are some cases worth simplifying.  */
3849
3850      switch (code)
3851	{
3852	case PLUS:
3853	  /* In IEEE floating point, x+0 is not the same as x.  Similarly
3854	     for the other optimizations below.  */
3855	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3856	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3857	    break;
3858
3859	  if (op1 == CONST0_RTX (mode))
3860	    return op0;
3861
3862	  /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3863	  if (GET_CODE (op0) == NEG)
3864	    return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3865	  else if (GET_CODE (op1) == NEG)
3866	    return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3867
3868	  /* Handle both-operands-constant cases.  We can only add
3869	     CONST_INTs to constants since the sum of relocatable symbols
3870	     can't be handled by most assemblers.  Don't add CONST_INT
3871	     to CONST_INT since overflow won't be computed properly if wider
3872	     than HOST_BITS_PER_WIDE_INT.  */
3873
3874	  if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3875	      && GET_CODE (op1) == CONST_INT)
3876	    return plus_constant (op0, INTVAL (op1));
3877	  else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3878		   && GET_CODE (op0) == CONST_INT)
3879	    return plus_constant (op1, INTVAL (op0));
3880
3881	  /* See if this is something like X * C - X or vice versa or
3882	     if the multiplication is written as a shift.  If so, we can
3883	     distribute and make a new multiply, shift, or maybe just
3884	     have X (if C is 2 in the example above).  But don't make
3885	     real multiply if we didn't have one before.  */
3886
3887	  if (! FLOAT_MODE_P (mode))
3888	    {
3889	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3890	      rtx lhs = op0, rhs = op1;
3891	      int had_mult = 0;
3892
3893	      if (GET_CODE (lhs) == NEG)
3894		coeff0 = -1, lhs = XEXP (lhs, 0);
3895	      else if (GET_CODE (lhs) == MULT
3896		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3897		{
3898		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3899		  had_mult = 1;
3900		}
3901	      else if (GET_CODE (lhs) == ASHIFT
3902		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3903		       && INTVAL (XEXP (lhs, 1)) >= 0
3904		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3905		{
3906		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3907		  lhs = XEXP (lhs, 0);
3908		}
3909
3910	      if (GET_CODE (rhs) == NEG)
3911		coeff1 = -1, rhs = XEXP (rhs, 0);
3912	      else if (GET_CODE (rhs) == MULT
3913		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3914		{
3915		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3916		  had_mult = 1;
3917		}
3918	      else if (GET_CODE (rhs) == ASHIFT
3919		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3920		       && INTVAL (XEXP (rhs, 1)) >= 0
3921		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3922		{
3923		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3924		  rhs = XEXP (rhs, 0);
3925		}
3926
3927	      if (rtx_equal_p (lhs, rhs))
3928		{
3929		  tem = cse_gen_binary (MULT, mode, lhs,
3930					GEN_INT (coeff0 + coeff1));
3931		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3932		}
3933	    }
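	  /* For example, (plus (mult X 2) X) becomes (mult X 3), while
	     (plus (ashift X 2) X) would give (mult X 5) and is
	     rejected because the original had no real multiply.  */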
3934
3935	  /* If one of the operands is a PLUS or a MINUS, see if we can
3936	     simplify this by the associative law.
3937	     Don't use the associative law for floating point.
3938	     The inaccuracy makes it nonassociative,
3939	     and subtle programs can break if operations are associated.  */
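	  /* For example, (plus (plus X 3) (minus Y 3)) re-associates to
	     (plus X Y).  */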
3940
3941	  if (INTEGRAL_MODE_P (mode)
3942	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3943		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3944	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3945	    return tem;
3946	  break;
3947
3948	case COMPARE:
3949#ifdef HAVE_cc0
3950	  /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3951	     using cc0, in which case we want to leave it as a COMPARE
3952	     so we can distinguish it from a register-register-copy.
3953
3954	     In IEEE floating point, x-0 is not the same as x.  */
3955
3956	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3957	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3958	      && op1 == CONST0_RTX (mode))
3959	    return op0;
3960#else
3961	  /* Do nothing here.  */
3962#endif
3963	  break;
3964
3965	case MINUS:
3966	  /* None of these optimizations can be done for IEEE
3967	     floating point.  */
3968	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3969	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3970	    break;
3971
3972	  /* We can't assume x-x is 0 even with non-IEEE floating point,
3973	     but since it is zero except in very strange circumstances, we
3974	     will treat it as zero with -ffast-math.  */
3975	  if (rtx_equal_p (op0, op1)
3976	      && ! side_effects_p (op0)
3977	      && (! FLOAT_MODE_P (mode) || flag_fast_math))
3978	    return CONST0_RTX (mode);
3979
3980	  /* Change subtraction from zero into negation.  */
3981	  if (op0 == CONST0_RTX (mode))
3982	    return gen_rtx_NEG (mode, op1);
3983
3984	  /* (-1 - a) is ~a.  */
3985	  if (op0 == constm1_rtx)
3986	    return gen_rtx_NOT (mode, op1);
3987
3988	  /* Subtracting 0 has no effect.  */
3989	  if (op1 == CONST0_RTX (mode))
3990	    return op0;
3991
3992	  /* See if this is something like X * C - X or vice versa or
3993	     if the multiplication is written as a shift.  If so, we can
3994	     distribute and make a new multiply, shift, or maybe just
3995	     have X (if C is 2 in the example above).  But don't make
3996	     real multiply if we didn't have one before.  */
3997
3998	  if (! FLOAT_MODE_P (mode))
3999	    {
4000	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
4001	      rtx lhs = op0, rhs = op1;
4002	      int had_mult = 0;
4003
4004	      if (GET_CODE (lhs) == NEG)
4005		coeff0 = -1, lhs = XEXP (lhs, 0);
4006	      else if (GET_CODE (lhs) == MULT
4007		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
4008		{
4009		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
4010		  had_mult = 1;
4011		}
4012	      else if (GET_CODE (lhs) == ASHIFT
4013		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
4014		       && INTVAL (XEXP (lhs, 1)) >= 0
4015		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
4016		{
4017		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
4018		  lhs = XEXP (lhs, 0);
4019		}
4020
4021	      if (GET_CODE (rhs) == NEG)
4022		coeff1 = -1, rhs = XEXP (rhs, 0);
4023	      else if (GET_CODE (rhs) == MULT
4024		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
4025		{
4026		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
4027		  had_mult = 1;
4028		}
4029	      else if (GET_CODE (rhs) == ASHIFT
4030		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
4031		       && INTVAL (XEXP (rhs, 1)) >= 0
4032		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
4033		{
4034		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
4035		  rhs = XEXP (rhs, 0);
4036		}
4037
4038	      if (rtx_equal_p (lhs, rhs))
4039		{
4040		  tem = cse_gen_binary (MULT, mode, lhs,
4041					GEN_INT (coeff0 - coeff1));
4042		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4043		}
4044	    }
4045
4046	  /* (a - (-b)) -> (a + b).  */
4047	  if (GET_CODE (op1) == NEG)
4048	    return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
4049
4050	  /* If one of the operands is a PLUS or a MINUS, see if we can
4051	     simplify this by the associative law.
4052	     Don't use the associative law for floating point.
4053	     The inaccuracy makes it nonassociative,
4054	     and subtle programs can break if operations are associated.  */
4055
4056	  if (INTEGRAL_MODE_P (mode)
4057	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4058		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4059	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4060	    return tem;
4061
4062	  /* Don't let a relocatable value get a negative coeff.  */
4063	  if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
4064	    return plus_constant (op0, - INTVAL (op1));
4065
4066	  /* (x - (x & y)) -> (x & ~y) */
4067	  if (GET_CODE (op1) == AND)
4068	    {
4069	      if (rtx_equal_p (op0, XEXP (op1, 0)))
4070		return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
4071	      if (rtx_equal_p (op0, XEXP (op1, 1)))
4072		return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
4073	    }
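	  /* Subtracting (and X Y) from X clears exactly the bits of X
	     that are set in Y, e.g. (minus X (and X 7)) becomes
	     (and X (not 7)).  */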
4074	  break;
4075
4076	case MULT:
4077	  if (op1 == constm1_rtx)
4078	    {
4079	      tem = simplify_unary_operation (NEG, mode, op0, mode);
4080
4081	      return tem ? tem : gen_rtx_NEG (mode, op0);
4082	    }
4083
4084	  /* In IEEE floating point, x*0 is not always 0.  */
4085	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4086	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4087	      && op1 == CONST0_RTX (mode)
4088	      && ! side_effects_p (op0))
4089	    return op1;
4090
4091	  /* In IEEE floating point, x*1 is not equivalent to x for nans.
4092	     However, ANSI says we can drop signals,
4093	     so we can do this anyway.  */
4094	  if (op1 == CONST1_RTX (mode))
4095	    return op0;
4096
4097	  /* Convert multiply by constant power of two into shift unless
4098	     we are still generating RTL.  This test is a kludge.  */
4099	  if (GET_CODE (op1) == CONST_INT
4100	      && (val = exact_log2 (INTVAL (op1))) >= 0
4101	      /* If the mode is larger than the host word size, and the
4102		 uppermost bit is set, then this isn't a power of two due
4103		 to implicit sign extension.  */
4104	      && (width <= HOST_BITS_PER_WIDE_INT
4105		  || val != HOST_BITS_PER_WIDE_INT - 1)
4106	      && ! rtx_equal_function_value_matters)
4107	    return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
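	  /* For example, (mult X 8) becomes (ashift X 3).  */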
4108
4109	  if (GET_CODE (op1) == CONST_DOUBLE
4110	      && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4111	    {
4112	      REAL_VALUE_TYPE d;
4113	      jmp_buf handler;
4114	      int op1is2, op1ism1;
4115
4116	      if (setjmp (handler))
4117		return 0;
4118
4119	      set_float_handler (handler);
4120	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4121	      op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4122	      op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4123	      set_float_handler (NULL_PTR);
4124
4125	      /* x*2 is x+x and x*(-1) is -x */
4126	      if (op1is2 && GET_MODE (op0) == mode)
4127		return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
4128
4129	      else if (op1ism1 && GET_MODE (op0) == mode)
4130		return gen_rtx_NEG (mode, op0);
4131	    }
4132	  break;
4133
4134	case IOR:
4135	  if (op1 == const0_rtx)
4136	    return op0;
4137	  if (GET_CODE (op1) == CONST_INT
4138	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4139	    return op1;
4140	  if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4141	    return op0;
4142	  /* A | (~A) -> -1 */
4143	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4144	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4145	      && ! side_effects_p (op0)
4146	      && GET_MODE_CLASS (mode) != MODE_CC)
4147	    return constm1_rtx;
4148	  break;
4149
4150	case XOR:
4151	  if (op1 == const0_rtx)
4152	    return op0;
4153	  if (GET_CODE (op1) == CONST_INT
4154	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4155	    return gen_rtx_NOT (mode, op0);
4156	  if (op0 == op1 && ! side_effects_p (op0)
4157	      && GET_MODE_CLASS (mode) != MODE_CC)
4158	    return const0_rtx;
4159	  break;
4160
4161	case AND:
4162	  if (op1 == const0_rtx && ! side_effects_p (op0))
4163	    return const0_rtx;
4164	  if (GET_CODE (op1) == CONST_INT
4165	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4166	    return op0;
4167	  if (op0 == op1 && ! side_effects_p (op0)
4168	      && GET_MODE_CLASS (mode) != MODE_CC)
4169	    return op0;
4170	  /* A & (~A) -> 0 */
4171	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4172	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4173	      && ! side_effects_p (op0)
4174	      && GET_MODE_CLASS (mode) != MODE_CC)
4175	    return const0_rtx;
4176	  break;
4177
4178	case UDIV:
4179	  /* Convert divide by power of two into shift (divide by 1 handled
4180	     below).  */
4181	  if (GET_CODE (op1) == CONST_INT
4182	      && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4183	    return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
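	  /* For example, (udiv X 8) becomes (lshiftrt X 3).  */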
4184
4185	  /* ... fall through ...  */
4186
4187	case DIV:
4188	  if (op1 == CONST1_RTX (mode))
4189	    return op0;
4190
4191	  /* In IEEE floating point, 0/x is not always 0.  */
4192	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4193	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4194	      && op0 == CONST0_RTX (mode)
4195	      && ! side_effects_p (op1))
4196	    return op0;
4197
4198#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4199	  /* Change division by a constant into multiplication.  Only do
4200	     this with -ffast-math until an expert says it is safe in
4201	     general.  */
4202	  else if (GET_CODE (op1) == CONST_DOUBLE
4203		   && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4204		   && op1 != CONST0_RTX (mode)
4205		   && flag_fast_math)
4206	    {
4207	      REAL_VALUE_TYPE d;
4208	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4209
4210	      if (! REAL_VALUES_EQUAL (d, dconst0))
4211		{
4212#if defined (REAL_ARITHMETIC)
4213		  REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4214		  return gen_rtx_MULT (mode, op0,
4215				       CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4216#else
4217		  return gen_rtx_MULT (mode, op0,
4218				       CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4219#endif
4220		}
4221	    }
4222#endif
4223	  break;
4224
4225	case UMOD:
4226	  /* Handle modulus by power of two (mod with 1 handled below).  */
4227	  if (GET_CODE (op1) == CONST_INT
4228	      && exact_log2 (INTVAL (op1)) > 0)
4229	    return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
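	  /* For example, (umod X 8) becomes (and X 7).  */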
4230
4231	  /* ... fall through ...  */
4232
4233	case MOD:
4234	  if ((op0 == const0_rtx || op1 == const1_rtx)
4235	      && ! side_effects_p (op0) && ! side_effects_p (op1))
4236	    return const0_rtx;
4237	  break;
4238
4239	case ROTATERT:
4240	case ROTATE:
4241	  /* Rotating ~0 always results in ~0.  */
4242	  if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4243	      && INTVAL (op0) == GET_MODE_MASK (mode)
4244	      && ! side_effects_p (op1))
4245	    return op0;
4246
4247	  /* ... fall through ...  */
4248
4249	case ASHIFT:
4250	case ASHIFTRT:
4251	case LSHIFTRT:
4252	  if (op1 == const0_rtx)
4253	    return op0;
4254	  if (op0 == const0_rtx && ! side_effects_p (op1))
4255	    return op0;
4256	  break;
4257
4258	case SMIN:
4259	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4260	      && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4261	      && ! side_effects_p (op0))
4262	    return op1;
4263	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4264	    return op0;
4265	  break;
4266
4267	case SMAX:
4268	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4269	      && (INTVAL (op1)
4270		  == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4271	      && ! side_effects_p (op0))
4272	    return op1;
4273	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4274	    return op0;
4275	  break;
4276
4277	case UMIN:
4278	  if (op1 == const0_rtx && ! side_effects_p (op0))
4279	    return op1;
4280	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4281	    return op0;
4282	  break;
4283
4284	case UMAX:
4285	  if (op1 == constm1_rtx && ! side_effects_p (op0))
4286	    return op1;
4287	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4288	    return op0;
4289	  break;
4290
4291	default:
4292	  abort ();
4293	}
4294
4295      return 0;
4296    }
4297
4298  /* Get the integer argument values in two forms:
4299     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */
4300
4301  arg0 = INTVAL (op0);
4302  arg1 = INTVAL (op1);
4303
4304  if (width < HOST_BITS_PER_WIDE_INT)
4305    {
4306      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4307      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4308
4309      arg0s = arg0;
4310      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4311	arg0s |= ((HOST_WIDE_INT) (-1) << width);
4312
4313      arg1s = arg1;
4314      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4315	arg1s |= ((HOST_WIDE_INT) (-1) << width);
4316    }
4317  else
4318    {
4319      arg0s = arg0;
4320      arg1s = arg1;
4321    }
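
  /* For example, for width == 8 and op0 == (const_int -1), ARG0 is now
     0xff (the zero-extended form) while ARG0S is -1 (the sign-extended
     form).  */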
4322
4323  /* Compute the value of the arithmetic.  */
4324
4325  switch (code)
4326    {
4327    case PLUS:
4328      val = arg0s + arg1s;
4329      break;
4330
4331    case MINUS:
4332      val = arg0s - arg1s;
4333      break;
4334
4335    case MULT:
4336      val = arg0s * arg1s;
4337      break;
4338
4339    case DIV:
4340      if (arg1s == 0)
4341	return 0;
4342      val = arg0s / arg1s;
4343      break;
4344
4345    case MOD:
4346      if (arg1s == 0)
4347	return 0;
4348      val = arg0s % arg1s;
4349      break;
4350
4351    case UDIV:
4352      if (arg1 == 0)
4353	return 0;
4354      val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4355      break;
4356
4357    case UMOD:
4358      if (arg1 == 0)
4359	return 0;
4360      val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4361      break;
4362
4363    case AND:
4364      val = arg0 & arg1;
4365      break;
4366
4367    case IOR:
4368      val = arg0 | arg1;
4369      break;
4370
4371    case XOR:
4372      val = arg0 ^ arg1;
4373      break;
4374
4375    case LSHIFTRT:
4376      /* If shift count is undefined, don't fold it; let the machine do
4377	 what it wants.  But truncate it if the machine will do that.  */
4378      if (arg1 < 0)
4379	return 0;
4380
4381#ifdef SHIFT_COUNT_TRUNCATED
4382      if (SHIFT_COUNT_TRUNCATED)
4383	arg1 %= width;
4384#endif
4385
4386      val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4387      break;
4388
4389    case ASHIFT:
4390      if (arg1 < 0)
4391	return 0;
4392
4393#ifdef SHIFT_COUNT_TRUNCATED
4394      if (SHIFT_COUNT_TRUNCATED)
4395	arg1 %= width;
4396#endif
4397
4398      val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4399      break;
4400
4401    case ASHIFTRT:
4402      if (arg1 < 0)
4403	return 0;
4404
4405#ifdef SHIFT_COUNT_TRUNCATED
4406      if (SHIFT_COUNT_TRUNCATED)
4407	arg1 %= width;
4408#endif
4409
4410      val = arg0s >> arg1;
4411
4412      /* The bootstrap compiler may not have sign extended the right shift.
4413	 Manually extend the sign to ensure the bootstrap cc matches gcc.  */
4414      if (arg0s < 0 && arg1 > 0)
4415	val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4416
4417      break;
4418
4419    case ROTATERT:
4420      if (arg1 < 0)
4421	return 0;
4422
4423      arg1 %= width;
4424      val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4425	     | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4426      break;
4427
4428    case ROTATE:
4429      if (arg1 < 0)
4430	return 0;
4431
4432      arg1 %= width;
4433      val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4434	     | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
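      /* For example, width == 8, arg0 == 0x81, arg1 == 1: val is 0x103
	 here, and the masking below reduces it to 0x03, i.e. 10000001
	 rotated left by one bit.  */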
4435      break;
4436
4437    case COMPARE:
4438      /* Do nothing here.  */
4439      return 0;
4440
4441    case SMIN:
4442      val = arg0s <= arg1s ? arg0s : arg1s;
4443      break;
4444
4445    case UMIN:
4446      val = ((unsigned HOST_WIDE_INT) arg0
4447	     <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4448      break;
4449
4450    case SMAX:
4451      val = arg0s > arg1s ? arg0s : arg1s;
4452      break;
4453
4454    case UMAX:
4455      val = ((unsigned HOST_WIDE_INT) arg0
4456	     > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4457      break;
4458
4459    default:
4460      abort ();
4461    }
4462
4463  /* Clear the bits that don't belong in our mode, unless they and our sign
4464     bit are all one.  So we get either a reasonable negative value or a
4465     reasonable unsigned value for this mode.  */
4466  if (width < HOST_BITS_PER_WIDE_INT
4467      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4468	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4469    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4470
4471  /* If this would be an entire word for the target, but is not for
4472     the host, then sign-extend on the host so that the number will look
4473     the same way on the host that it would on the target.
4474
4475     For example, when building a 64 bit alpha hosted 32 bit sparc
4476     targeted compiler, we want the 32 bit unsigned value -1 to be
4477     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
4478     The latter confuses the sparc backend.  */
4479
4480  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4481      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4482    val |= ((HOST_WIDE_INT) (-1) << width);
4483
4484  return GEN_INT (val);
4485}
4486
4487/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4488   PLUS or MINUS.
4489
4490   Rather than test for specific cases, we do this by a brute-force method
4491   and do all possible simplifications until no more changes occur.  Then
4492   we rebuild the operation.  */
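
/* For example, (plus (minus A B) (minus B C)) expands to the operand
   list {A, -B, B, -C}; the B terms cancel against each other, and the
   result is rebuilt as (minus A C).  */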
4493
4494static rtx
4495simplify_plus_minus (code, mode, op0, op1)
4496     enum rtx_code code;
4497     enum machine_mode mode;
4498     rtx op0, op1;
4499{
4500  rtx ops[8];
4501  int negs[8];
4502  rtx result, tem;
4503  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4504  int first = 1, negate = 0, changed;
4505  int i, j;
4506
4507  bzero ((char *) ops, sizeof ops);
4508
4509  /* Set up the two operands and then expand them until nothing has been
4510     changed.  If we run out of room in our array, give up; this should
4511     almost never happen.  */
4512
4513  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4514
4515  changed = 1;
4516  while (changed)
4517    {
4518      changed = 0;
4519
4520      for (i = 0; i < n_ops; i++)
4521	switch (GET_CODE (ops[i]))
4522	  {
4523	  case PLUS:
4524	  case MINUS:
4525	    if (n_ops == 7)
4526	      return 0;
4527
4528	    ops[n_ops] = XEXP (ops[i], 1);
4529	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4530	    ops[i] = XEXP (ops[i], 0);
4531	    input_ops++;
4532	    changed = 1;
4533	    break;
4534
4535	  case NEG:
4536	    ops[i] = XEXP (ops[i], 0);
4537	    negs[i] = ! negs[i];
4538	    changed = 1;
4539	    break;
4540
4541	  case CONST:
4542	    ops[i] = XEXP (ops[i], 0);
4543	    input_consts++;
4544	    changed = 1;
4545	    break;
4546
4547	  case NOT:
4548	    /* ~a -> (-a - 1) */
4549	    if (n_ops != 7)
4550	      {
4551		ops[n_ops] = constm1_rtx;
4552		negs[n_ops++] = negs[i];
4553		ops[i] = XEXP (ops[i], 0);
4554		negs[i] = ! negs[i];
4555		changed = 1;
4556	      }
4557	    break;
4558
4559	  case CONST_INT:
4560	    if (negs[i])
4561	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4562	    break;
4563
4564	  default:
4565	    break;
4566	  }
4567    }
4568
4569  /* If we only have two operands, we can't do anything.  */
4570  if (n_ops <= 2)
4571    return 0;
4572
4573  /* Now simplify each pair of operands until nothing changes.  The first
4574     time through just simplify constants against each other.  */
4575
4576  changed = 1;
4577  while (changed)
4578    {
4579      changed = first;
4580
4581      for (i = 0; i < n_ops - 1; i++)
4582	for (j = i + 1; j < n_ops; j++)
4583	  if (ops[i] != 0 && ops[j] != 0
4584	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4585	    {
4586	      rtx lhs = ops[i], rhs = ops[j];
4587	      enum rtx_code ncode = PLUS;
4588
4589	      if (negs[i] && ! negs[j])
4590		lhs = ops[j], rhs = ops[i], ncode = MINUS;
4591	      else if (! negs[i] && negs[j])
4592		ncode = MINUS;
4593
4594	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4595	      if (tem)
4596		{
4597		  ops[i] = tem, ops[j] = 0;
4598		  negs[i] = negs[i] && negs[j];
4599		  if (GET_CODE (tem) == NEG)
4600		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4601
4602		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4603		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4604		  changed = 1;
4605		}
4606	    }
4607
4608      first = 0;
4609    }
4610
4611  /* Pack all the operands to the lower-numbered entries and give up if
4612     we didn't reduce the number of operands we had.  Make sure we
4613     count a CONST as two operands.  If we have the same number of
4614     operands, but have made more CONSTs than we had, this is also
4615     an improvement, so accept it.  */
4616
4617  for (i = 0, j = 0; j < n_ops; j++)
4618    if (ops[j] != 0)
4619      {
4620	ops[i] = ops[j], negs[i++] = negs[j];
4621	if (GET_CODE (ops[j]) == CONST)
4622	  n_consts++;
4623      }
4624
4625  if (i + n_consts > input_ops
4626      || (i + n_consts == input_ops && n_consts <= input_consts))
4627    return 0;
4628
4629  n_ops = i;
4630
4631  /* If we have a CONST_INT, put it last.  */
4632  for (i = 0; i < n_ops - 1; i++)
4633    if (GET_CODE (ops[i]) == CONST_INT)
4634      {
4635	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4636	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4637      }
4638
4639  /* Put a non-negated operand first.  If there aren't any, make all
4640     operands positive and negate the whole thing later.  */
4641  for (i = 0; i < n_ops && negs[i]; i++)
4642    ;
4643
4644  if (i == n_ops)
4645    {
4646      for (i = 0; i < n_ops; i++)
4647	negs[i] = 0;
4648      negate = 1;
4649    }
4650  else if (i != 0)
4651    {
4652      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4653      j = negs[0], negs[0] = negs[i], negs[i] = j;
4654    }
4655
4656  /* Now make the result by performing the requested operations.  */
4657  result = ops[0];
4658  for (i = 1; i < n_ops; i++)
4659    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4660
4661  return negate ? gen_rtx_NEG (mode, result) : result;
4662}
4663
4664/* Make a binary operation by properly ordering the operands and
4665   seeing if the expression folds.  */
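
/* For example, cse_gen_binary (PLUS, SImode, (const_int 3), (reg:SI X))
   first commutes the operands, then, since the sum does not fold to a
   constant, returns (plus:SI (reg:SI X) (const_int 3)) via
   plus_constant.  */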
4666
4667static rtx
4668cse_gen_binary (code, mode, op0, op1)
4669     enum rtx_code code;
4670     enum machine_mode mode;
4671     rtx op0, op1;
4672{
4673  rtx tem;
4674
4675  /* Put complex operands first and constants second if commutative.  */
4676  if (GET_RTX_CLASS (code) == 'c'
4677      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4678	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4679	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4680	  || (GET_CODE (op0) == SUBREG
4681	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4682	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4683    tem = op0, op0 = op1, op1 = tem;
4684
4685  /* If this simplifies, do it.  */
4686  tem = simplify_binary_operation (code, mode, op0, op1);
4687
4688  if (tem)
4689    return tem;
4690
4691  /* Handle addition and subtraction of CONST_INT specially.  Otherwise,
4692     just form the operation.  */
4693
4694  if (code == PLUS && GET_CODE (op1) == CONST_INT
4695      && GET_MODE (op0) != VOIDmode)
4696    return plus_constant (op0, INTVAL (op1));
4697  else if (code == MINUS && GET_CODE (op1) == CONST_INT
4698	   && GET_MODE (op0) != VOIDmode)
4699    return plus_constant (op0, - INTVAL (op1));
4700  else
4701    return gen_rtx_fmt_ee (code, mode, op0, op1);
4702}
4703
4704struct cfc_args
4705{
4706  /* Input */
4707  rtx op0, op1;
4708  /* Output */
4709  int equal, op0lt, op1lt;
4710};
4711
4712static void
4713check_fold_consts (data)
4714     PTR data;
4715{
4716  struct cfc_args * args = (struct cfc_args *) data;
4717  REAL_VALUE_TYPE d0, d1;
4718
4719  REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4720  REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4721  args->equal = REAL_VALUES_EQUAL (d0, d1);
4722  args->op0lt = REAL_VALUES_LESS (d0, d1);
4723  args->op1lt = REAL_VALUES_LESS (d1, d0);
4724}
4725
4726/* Like simplify_binary_operation except used for relational operators.
4727   MODE is the mode of the operands, not that of the result.  If MODE
4728   is VOIDmode, both operands must also be VOIDmode and we compare the
4729   operands in "infinite precision".
4730
4731   If no simplification is possible, this function returns zero.  Otherwise,
4732   it returns either const_true_rtx or const0_rtx.  */
4733
4734rtx
4735simplify_relational_operation (code, mode, op0, op1)
4736     enum rtx_code code;
4737     enum machine_mode mode;
4738     rtx op0, op1;
4739{
4740  int equal, op0lt, op0ltu, op1lt, op1ltu;
4741  rtx tem;
4742
4743  /* If op0 is a compare, extract the comparison arguments from it.  */
4744  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4745    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4746
4747  /* We can't simplify MODE_CC values since we don't know what the
4748     actual comparison is.  */
4749  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4750#ifdef HAVE_cc0
4751      || op0 == cc0_rtx
4752#endif
4753      )
4754    return 0;
4755
4756  /* For integer comparisons of A and B maybe we can simplify A - B and can
4757     then simplify a comparison of that with zero.  If A and B are both either
4758     a register or a CONST_INT, this can't help; testing for these cases will
4759     prevent infinite recursion here and speed things up.
4760
4761     If CODE is an unsigned comparison, then we can never do this optimization,
4762     because it gives an incorrect result if the subtraction wraps around zero.
4763     ANSI C defines unsigned operations such that they never overflow, and
4764     thus such cases cannot be ignored.  */
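
  /* For example, comparing (plus X 4) with (plus X 2) under GT: the
     difference folds to (const_int 2), and (gt 2 0) then folds to
     const_true_rtx.  */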
4765
4766  if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4767      && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4768	    && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4769      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4770      && code != GTU && code != GEU && code != LTU && code != LEU)
4771    return simplify_relational_operation (signed_condition (code),
4772					  mode, tem, const0_rtx);
4773
4774  /* For non-IEEE floating-point, if the two operands are equal, we know the
4775     result.  */
4776  if (rtx_equal_p (op0, op1)
4777      && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4778	  || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4779    equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4780
4781  /* If the operands are floating-point constants, see if we can fold
4782     the result.  */
4783#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4784  else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4785	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4786    {
4787      struct cfc_args args;
4788
4789      /* Set up the input for check_fold_consts ().  */
4790      args.op0 = op0;
4791      args.op1 = op1;
4792
4793      if (do_float_handler (check_fold_consts, (PTR) &args) == 0)
4794	/* We got an exception from check_fold_consts ().  */
4795	return 0;
4796
4797      /* Receive the output from check_fold_consts ().  */
4798      equal = args.equal;
4799      op0lt = op0ltu = args.op0lt;
4800      op1lt = op1ltu = args.op1lt;
4801    }
4802#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4803
4804  /* Otherwise, see if the operands are both integers.  */
4805  else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4806	   && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4807	   && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4808    {
4809      int width = GET_MODE_BITSIZE (mode);
4810      HOST_WIDE_INT l0s, h0s, l1s, h1s;
4811      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4812
4813      /* Get the two words comprising each integer constant.  */
4814      if (GET_CODE (op0) == CONST_DOUBLE)
4815	{
4816	  l0u = l0s = CONST_DOUBLE_LOW (op0);
4817	  h0u = h0s = CONST_DOUBLE_HIGH (op0);
4818	}
4819      else
4820	{
4821	  l0u = l0s = INTVAL (op0);
4822	  h0u = h0s = l0s < 0 ? -1 : 0;
4823	}
4824
4825      if (GET_CODE (op1) == CONST_DOUBLE)
4826	{
4827	  l1u = l1s = CONST_DOUBLE_LOW (op1);
4828	  h1u = h1s = CONST_DOUBLE_HIGH (op1);
4829	}
4830      else
4831	{
4832	  l1u = l1s = INTVAL (op1);
4833	  h1u = h1s = l1s < 0 ? -1 : 0;
4834	}
4835
4836      /* If WIDTH is nonzero and smaller than HOST_BITS_PER_WIDE_INT,
4837	 we have to sign or zero-extend the values.  */
4838      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4839	h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4840
4841      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4842	{
4843	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4844	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4845
4846	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4847	    l0s |= ((HOST_WIDE_INT) (-1) << width);
4848
4849	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4850	    l1s |= ((HOST_WIDE_INT) (-1) << width);
4851	}
4852
4853      equal = (h0u == h1u && l0u == l1u);
4854      op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4855      op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4856      op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4857      op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
4858    }
4859
4860  /* Otherwise, there are some code-specific tests we can make.  */
4861  else
4862    {
4863      switch (code)
4864	{
4865	case EQ:
4866	  /* References to the frame plus a constant or labels cannot
4867	     be zero, but a SYMBOL_REF can due to #pragma weak.  */
4868	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4869	       || GET_CODE (op0) == LABEL_REF)
4870#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4871	      /* On some machines, the ap reg can be 0 sometimes.  */
4872	      && op0 != arg_pointer_rtx
4873#endif
4874		)
4875	    return const0_rtx;
4876	  break;
4877
4878	case NE:
4879	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4880	       || GET_CODE (op0) == LABEL_REF)
4881#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4882	      && op0 != arg_pointer_rtx
4883#endif
4884	      )
4885	    return const_true_rtx;
4886	  break;
4887
4888	case GEU:
4889	  /* Unsigned values are never negative.  */
4890	  if (op1 == const0_rtx)
4891	    return const_true_rtx;
4892	  break;
4893
4894	case LTU:
4895	  if (op1 == const0_rtx)
4896	    return const0_rtx;
4897	  break;
4898
4899	case LEU:
4900	  /* Unsigned values are never greater than the largest
4901	     unsigned value.  */
4902	  if (GET_CODE (op1) == CONST_INT
4903	      && INTVAL (op1) == GET_MODE_MASK (mode)
4904	      && INTEGRAL_MODE_P (mode))
4905	    return const_true_rtx;
4906	  break;
4907
4908	case GTU:
4909	  if (GET_CODE (op1) == CONST_INT
4910	      && INTVAL (op1) == GET_MODE_MASK (mode)
4911	      && INTEGRAL_MODE_P (mode))
4912	    return const0_rtx;
4913	  break;
4914
4915	default:
4916	  break;
4917	}
4918
4919      return 0;
4920    }
4921
4922  /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4923     as appropriate.  */
4924  switch (code)
4925    {
4926    case EQ:
4927      return equal ? const_true_rtx : const0_rtx;
4928    case NE:
4929      return ! equal ? const_true_rtx : const0_rtx;
4930    case LT:
4931      return op0lt ? const_true_rtx : const0_rtx;
4932    case GT:
4933      return op1lt ? const_true_rtx : const0_rtx;
4934    case LTU:
4935      return op0ltu ? const_true_rtx : const0_rtx;
4936    case GTU:
4937      return op1ltu ? const_true_rtx : const0_rtx;
4938    case LE:
4939      return equal || op0lt ? const_true_rtx : const0_rtx;
4940    case GE:
4941      return equal || op1lt ? const_true_rtx : const0_rtx;
4942    case LEU:
4943      return equal || op0ltu ? const_true_rtx : const0_rtx;
4944    case GEU:
4945      return equal || op1ltu ? const_true_rtx : const0_rtx;
4946    default:
4947      abort ();
4948    }
4949}
4950
4951/* Simplify CODE, an operation with result mode MODE and three operands,
4952   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
4953   a constant.  Return 0 if no simplification is possible.  */
4954
4955rtx
4956simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4957     enum rtx_code code;
4958     enum machine_mode mode, op0_mode;
4959     rtx op0, op1, op2;
4960{
4961  int width = GET_MODE_BITSIZE (mode);
4962
4963  /* VOIDmode means "infinite" precision.  */
4964  if (width == 0)
4965    width = HOST_BITS_PER_WIDE_INT;
4966
4967  switch (code)
4968    {
4969    case SIGN_EXTRACT:
4970    case ZERO_EXTRACT:
4971      if (GET_CODE (op0) == CONST_INT
4972	  && GET_CODE (op1) == CONST_INT
4973	  && GET_CODE (op2) == CONST_INT
4974	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4975	  && width <= HOST_BITS_PER_WIDE_INT)
4976	{
4977	  /* Extracting a bit-field from a constant */
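	  /* For example, (zero_extract (const_int 0x1234) (const_int 4)
	     (const_int 4)) with little-endian bit numbering extracts
	     bits 4-7 of 0x1234: (0x1234 >> 4) & 0xf == 0x3.  */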
4978	  HOST_WIDE_INT val = INTVAL (op0);
4979
4980	  if (BITS_BIG_ENDIAN)
4981	    val >>= (GET_MODE_BITSIZE (op0_mode)
4982		     - INTVAL (op2) - INTVAL (op1));
4983	  else
4984	    val >>= INTVAL (op2);
4985
4986	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4987	    {
4988	      /* First zero-extend.  */
4989	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4990	      /* If desired, propagate sign bit.  */
4991	      if (code == SIGN_EXTRACT
4992		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4993		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4994	    }
4995
4996	  /* Clear the bits that don't belong in our mode,
4997	     unless they and our sign bit are all one.
4998	     So we get either a reasonable negative value or a reasonable
4999	     unsigned value for this mode.  */
5000	  if (width < HOST_BITS_PER_WIDE_INT
5001	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
5002		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
5003	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
5004
5005	  return GEN_INT (val);
5006	}
5007      break;
5008
5009    case IF_THEN_ELSE:
5010      if (GET_CODE (op0) == CONST_INT)
5011	return op0 != const0_rtx ? op1 : op2;
5012
5013      /* Convert a == b ? b : a (and a != b ? a : b) to "a".  */
5014      if (GET_CODE (op0) == NE && ! side_effects_p (op0)
5015	  && rtx_equal_p (XEXP (op0, 0), op1)
5016	  && rtx_equal_p (XEXP (op0, 1), op2))
5017	return op1;
5018      else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
5019	       && rtx_equal_p (XEXP (op0, 1), op1)
5020	       && rtx_equal_p (XEXP (op0, 0), op2))
5021	return op2;
5022      else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
5023	{
5024	  rtx temp;
5025	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
5026						XEXP (op0, 0), XEXP (op0, 1));
5027	  /* See if any simplifications were possible.  */
5028	  if (temp == const0_rtx)
5029	    return op2;
5030	  else if (temp == const1_rtx)
5031	    return op1;
5032	}
5033      break;
5034
5035    default:
5036      abort ();
5037    }
5038
5039  return 0;
5040}
5041
5042/* If X is a nontrivial arithmetic operation on an argument
5043   for which a constant value can be determined, return
5044   the result of operating on that value, as a constant.
5045   Otherwise, return X, possibly with one or more operands
5046   modified by recursive calls to this function.
5047
5048   If X is a register whose contents are known, we do NOT
5049   return those contents here.  equiv_constant is called to
5050   perform that task.
5051
5052   INSN is the insn that we may be modifying.  If it is 0, make a copy
5053   of X before modifying it.  */
5054
5055static rtx
5056fold_rtx (x, insn)
5057     rtx x;
5058     rtx insn;
5059{
5060  register enum rtx_code code;
5061  register enum machine_mode mode;
5062  register char *fmt;
5063  register int i;
5064  rtx new = 0;
5065  int copied = 0;
5066  int must_swap = 0;
5067
5068  /* Folded equivalents of first two operands of X.  */
5069  rtx folded_arg0;
5070  rtx folded_arg1;
5071
5072  /* Constant equivalents of first three operands of X;
5073     0 when no such equivalent is known.  */
5074  rtx const_arg0;
5075  rtx const_arg1;
5076  rtx const_arg2;
5077
5078  /* The mode of the first operand of X.  We need this for sign and zero
5079     extends.  */
5080  enum machine_mode mode_arg0;
5081
5082  if (x == 0)
5083    return x;
5084
5085  mode = GET_MODE (x);
5086  code = GET_CODE (x);
5087  switch (code)
5088    {
5089    case CONST:
5090    case CONST_INT:
5091    case CONST_DOUBLE:
5092    case SYMBOL_REF:
5093    case LABEL_REF:
5094    case REG:
5095      /* No use simplifying an EXPR_LIST
5096	 since they are used only for lists of args
5097	 in a function call's REG_EQUAL note.  */
5098    case EXPR_LIST:
5099      /* Changing anything inside an ADDRESSOF is incorrect; we don't
5100	 want to (e.g.,) make (addressof (const_int 0)) just because
5101	 the location is known to be zero.  */
5102    case ADDRESSOF:
5103      return x;
5104
5105#ifdef HAVE_cc0
5106    case CC0:
5107      return prev_insn_cc0;
5108#endif
5109
5110    case PC:
5111      /* If the next insn is a CODE_LABEL followed by a jump table,
5112	 PC's value is a LABEL_REF pointing to that label.  That
5113	 lets us fold switch statements on the Vax.  */
5114      if (insn && GET_CODE (insn) == JUMP_INSN)
5115	{
5116	  rtx next = next_nonnote_insn (insn);
5117
5118	  if (next && GET_CODE (next) == CODE_LABEL
5119	      && NEXT_INSN (next) != 0
5120	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5121	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5122		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
5123	    return gen_rtx_LABEL_REF (Pmode, next);
5124	}
5125      break;
5126
5127    case SUBREG:
5128      /* See if we previously assigned a constant value to this SUBREG.  */
5129      if ((new = lookup_as_function (x, CONST_INT)) != 0
5130	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
5131	return new;
5132
5133      /* If this is a paradoxical SUBREG, we have no idea what value the
5134	 extra bits would have.  However, if the operand is equivalent
5135	 to a SUBREG whose operand is the same as our mode, and all the
5136	 modes are within a word, we can just use the inner operand
5137	 because these SUBREGs just say how to treat the register.
5138
5139	 Similarly if we find an integer constant.  */
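      /* A made-up illustration, with every mode within a word: if X is
	 (subreg:HI (reg:QI 65) 0) and (reg:QI 65) is known equivalent to
	 (subreg:QI (reg:HI 66) 0), we simply return (reg:HI 66).  */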
5140
5141      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5142	{
5143	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5144	  struct table_elt *elt;
5145
5146	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5147	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5148	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5149				imode)) != 0)
5150	    for (elt = elt->first_same_value;
5151		 elt; elt = elt->next_same_value)
5152	      {
5153		if (CONSTANT_P (elt->exp)
5154		    && GET_MODE (elt->exp) == VOIDmode)
5155		  return elt->exp;
5156
5157		if (GET_CODE (elt->exp) == SUBREG
5158		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
5159		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5160		  return copy_rtx (SUBREG_REG (elt->exp));
5161	      }
5162
5163	  return x;
5164	}
5165
5166      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
5167	 We might be able to if the SUBREG is extracting a single word in an
5168	 integral mode or extracting the low part.  */
5169
5170      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5171      const_arg0 = equiv_constant (folded_arg0);
5172      if (const_arg0)
5173	folded_arg0 = const_arg0;
5174
5175      if (folded_arg0 != SUBREG_REG (x))
5176	{
5177	  new = 0;
5178
5179	  if (GET_MODE_CLASS (mode) == MODE_INT
5180	      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5181	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5182	    new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5183				   GET_MODE (SUBREG_REG (x)));
5184	  if (new == 0 && subreg_lowpart_p (x))
5185	    new = gen_lowpart_if_possible (mode, folded_arg0);
5186	  if (new)
5187	    return new;
5188	}
5189
5190      /* If this is a narrowing SUBREG and our operand is a REG, see if
5191	 we can find an equivalence for REG that is an arithmetic operation
5192	 in a wider mode where both operands are paradoxical SUBREGs
5193	 from objects of our result mode.  In that case, we couldn't report
5194	 an equivalent value for that operation, since we don't know what the
5195	 extra bits will be.  But we can find an equivalence for this SUBREG
5196	 by folding that operation in the narrow mode.  This allows us to
5197	 fold arithmetic in narrow modes when the machine only supports
5198	 word-sized arithmetic.
5199
5200	 Also look for a case where we have a SUBREG whose operand is the
5201	 same as our result.  If both modes are smaller than a word, we
5202	 are simply interpreting a register in different modes and we
5203	 can use the inner value.  */
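      /* A made-up illustration of the first case: if X is
	 (subreg:QI (reg:SI 65) 0) and (reg:SI 65) is known equivalent to
	 (plus:SI (subreg:SI (reg:QI 66) 0) (const_int 4)), then, provided
	 (reg:QI 66) has a known constant value, the PLUS can be computed
	 directly in QImode.  */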
5204
5205      if (GET_CODE (folded_arg0) == REG
5206	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5207	  && subreg_lowpart_p (x))
5208	{
5209	  struct table_elt *elt;
5210
5211	  /* We can use HASH here since we know that canon_hash won't be
5212	     called.  */
5213	  elt = lookup (folded_arg0,
5214			HASH (folded_arg0, GET_MODE (folded_arg0)),
5215			GET_MODE (folded_arg0));
5216
5217	  if (elt)
5218	    elt = elt->first_same_value;
5219
5220	  for (; elt; elt = elt->next_same_value)
5221	    {
5222	      enum rtx_code eltcode = GET_CODE (elt->exp);
5223
5224	      /* Just check for unary and binary operations.  */
5225	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5226		  && GET_CODE (elt->exp) != SIGN_EXTEND
5227		  && GET_CODE (elt->exp) != ZERO_EXTEND
5228		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5229		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5230		{
5231		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5232
5233		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5234		    op0 = fold_rtx (op0, NULL_RTX);
5235
5236		  op0 = equiv_constant (op0);
5237		  if (op0)
5238		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5239						    op0, mode);
5240		}
5241	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5242			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5243		       && eltcode != DIV && eltcode != MOD
5244		       && eltcode != UDIV && eltcode != UMOD
5245		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5246		       && eltcode != ROTATE && eltcode != ROTATERT
5247		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5248			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5249				== mode))
5250			   || CONSTANT_P (XEXP (elt->exp, 0)))
5251		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5252			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5253				== mode))
5254			   || CONSTANT_P (XEXP (elt->exp, 1))))
5255		{
5256		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5257		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5258
5259		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5260		    op0 = fold_rtx (op0, NULL_RTX);
5261
5262		  if (op0)
5263		    op0 = equiv_constant (op0);
5264
5265		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5266		    op1 = fold_rtx (op1, NULL_RTX);
5267
5268		  if (op1)
5269		    op1 = equiv_constant (op1);
5270
5271		  /* If we are looking for the low SImode part of
5272		     (ashift:DI c (const_int 32)), it doesn't work
5273		     to compute that in SImode, because a 32-bit shift
5274		     in SImode is unpredictable.  We know the value is 0.  */
5275		  if (op0 && op1
5276		      && GET_CODE (elt->exp) == ASHIFT
5277		      && GET_CODE (op1) == CONST_INT
5278		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5279		    {
5280		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5281
5282			/* If the count fits in the inner mode's width,
5283			   but exceeds the outer mode's width,
5284			   the value will get truncated to 0
5285			   by the subreg.  */
5286			new = const0_rtx;
5287		      else
5288			/* If the count exceeds even the inner mode's width,
5289			   don't fold this expression.  */
5290			new = 0;
5291		    }
5292		  else if (op0 && op1)
5293		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5294						     op0, op1);
5295		}
5296
5297	      else if (GET_CODE (elt->exp) == SUBREG
5298		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
5299		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5300			   <= UNITS_PER_WORD)
5301		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5302		new = copy_rtx (SUBREG_REG (elt->exp));
5303
5304	      if (new)
5305		return new;
5306	    }
5307	}
5308
5309      return x;
5310
5311    case NOT:
5312    case NEG:
5313      /* If we have (NOT Y), see if Y is known to be (NOT Z).
5314	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
5315      new = lookup_as_function (XEXP (x, 0), code);
5316      if (new)
5317	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5318      break;
5319
5320    case MEM:
5321      /* If we are not actually processing an insn, don't try to find the
5322	 best address.  Not only don't we care, but we could modify the
5323	 MEM in an invalid way since we have no insn to validate against.  */
5324      if (insn != 0)
5325	find_best_addr (insn, &XEXP (x, 0));
5326
5327      {
5328	/* Even if we don't fold in the insn itself,
5329	   we can safely do so here, in hopes of getting a constant.  */
5330	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5331	rtx base = 0;
5332	HOST_WIDE_INT offset = 0;
5333
5334	if (GET_CODE (addr) == REG
5335	    && REGNO_QTY_VALID_P (REGNO (addr))
5336	    && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5337	    && qty_const[REG_QTY (REGNO (addr))] != 0)
5338	  addr = qty_const[REG_QTY (REGNO (addr))];
5339
5340	/* If the address is constant, split it into a base and an integer offset.  */
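	/* E.g., (const (plus (symbol_ref "x") (const_int 8))) splits into
	   BASE = (symbol_ref "x") and OFFSET = 8 (the symbol name is
	   illustrative).  */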
5341	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5342	  base = addr;
5343	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5344		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5345	  {
5346	    base = XEXP (XEXP (addr, 0), 0);
5347	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5348	  }
5349	else if (GET_CODE (addr) == LO_SUM
5350		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5351	  base = XEXP (addr, 1);
5352	else if (GET_CODE (addr) == ADDRESSOF)
5353	  return change_address (x, VOIDmode, addr);
5354
5355	/* If this is a constant pool reference, we can fold it into its
5356	   constant to allow better value tracking.  */
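	/* E.g., (mem:DF (symbol_ref "*.LC0")), where the pool entry for
	   .LC0 is a DFmode (const_double ...), folds to that CONST_DOUBLE
	   outright (the pool label is invented; real names are
	   target-specific).  */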
5357	if (base && GET_CODE (base) == SYMBOL_REF
5358	    && CONSTANT_POOL_ADDRESS_P (base))
5359	  {
5360	    rtx constant = get_pool_constant (base);
5361	    enum machine_mode const_mode = get_pool_mode (base);
5362	    rtx new;
5363
5364	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5365	      constant_pool_entries_cost = COST (constant);
5366
5367	    /* If we are loading the full constant, we have an equivalence.  */
5368	    if (offset == 0 && mode == const_mode)
5369	      return constant;
5370
5371	    /* If this actually isn't a constant (weird!), we can't do
5372	       anything.  Otherwise, handle the two most common cases:
5373	       extracting a word from a multi-word constant, and extracting
5374	       the low-order bits.  Other cases don't seem common enough to
5375	       worry about.  */
5376	    if (! CONSTANT_P (constant))
5377	      return x;
5378
5379	    if (GET_MODE_CLASS (mode) == MODE_INT
5380		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
5381		&& offset % UNITS_PER_WORD == 0
5382		&& (new = operand_subword (constant,
5383					   offset / UNITS_PER_WORD,
5384					   0, const_mode)) != 0)
5385	      return new;
5386
5387	    if (((BYTES_BIG_ENDIAN
5388		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5389		 || (! BYTES_BIG_ENDIAN && offset == 0))
5390		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
5391	      return new;
5392	  }
5393
5394	/* If this is a reference to a label at a known position in a jump
5395	   table, we also know its value.  */
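	/* Schematically: if BASE is (label_ref L), L heads an
	   (addr_vec ...), and OFFSET selects element N of the vector,
	   the load yields that element directly (L and N are
	   placeholders).  */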
5396	if (base && GET_CODE (base) == LABEL_REF)
5397	  {
5398	    rtx label = XEXP (base, 0);
5399	    rtx table_insn = NEXT_INSN (label);
5400
5401	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5402		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5403	      {
5404		rtx table = PATTERN (table_insn);
5405
5406		if (offset >= 0
5407		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5408			< XVECLEN (table, 0)))
5409		  return XVECEXP (table, 0,
5410				  offset / GET_MODE_SIZE (GET_MODE (table)));
5411	      }
5412	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5413		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5414	      {
5415		rtx table = PATTERN (table_insn);
5416
5417		if (offset >= 0
5418		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5419			< XVECLEN (table, 1)))
5420		  {
5421		    offset /= GET_MODE_SIZE (GET_MODE (table));
5422		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5423					 XEXP (table, 0));
5424
5425		    if (GET_MODE (table) != Pmode)
5426		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5427
5428		    /* Indicate this is a constant.  This isn't a
5429		       valid form of CONST, but it will only be used
5430		       to fold the next insns and then discarded, so
5431		       it should be safe.
5432
5433		       Note this expression must be explicitly discarded,
5434		       by cse_insn, else it may end up in a REG_EQUAL note
5435		       and "escape" to cause problems elsewhere.  */
5436		    return gen_rtx_CONST (GET_MODE (new), new);
5437		  }
5438	      }
5439	  }
5440
5441	return x;
5442      }
5443
5444    case ASM_OPERANDS:
5445      for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5446	validate_change (insn, &XVECEXP (x, 3, i),
5447			 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5448      break;
5449
5450    default:
5451      break;
5452    }
5453
5454  const_arg0 = 0;
5455  const_arg1 = 0;
5456  const_arg2 = 0;
5457  mode_arg0 = VOIDmode;
5458
5459  /* Try folding our operands.
5460     Then see which ones have constant values known.  */
5461
5462  fmt = GET_RTX_FORMAT (code);
5463  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5464    if (fmt[i] == 'e')
5465      {
5466	rtx arg = XEXP (x, i);
5467	rtx folded_arg = arg, const_arg = 0;
5468	enum machine_mode mode_arg = GET_MODE (arg);
5469	rtx cheap_arg, expensive_arg;
5470	rtx replacements[2];
5471	int j;
5472
5473	/* Most arguments are cheap, so handle them specially.  */
5474	switch (GET_CODE (arg))
5475	  {
5476	  case REG:
5477	    /* This is the same as calling equiv_constant; it is duplicated
5478	       here for speed.  */
5479	    if (REGNO_QTY_VALID_P (REGNO (arg))
5480		&& qty_const[REG_QTY (REGNO (arg))] != 0
5481		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5482		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
5483	      const_arg
5484		= gen_lowpart_if_possible (GET_MODE (arg),
5485					   qty_const[REG_QTY (REGNO (arg))]);
5486	    break;
5487
5488	  case CONST:
5489	  case CONST_INT:
5490	  case SYMBOL_REF:
5491	  case LABEL_REF:
5492	  case CONST_DOUBLE:
5493	    const_arg = arg;
5494	    break;
5495
5496#ifdef HAVE_cc0
5497	  case CC0:
5498	    folded_arg = prev_insn_cc0;
5499	    mode_arg = prev_insn_cc0_mode;
5500	    const_arg = equiv_constant (folded_arg);
5501	    break;
5502#endif
5503
5504	  default:
5505	    folded_arg = fold_rtx (arg, insn);
5506	    const_arg = equiv_constant (folded_arg);
5507	  }
5508
5509	/* For the first three operands, see if the operand
5510	   is constant or equivalent to a constant.  */
5511	switch (i)
5512	  {
5513	  case 0:
5514	    folded_arg0 = folded_arg;
5515	    const_arg0 = const_arg;
5516	    mode_arg0 = mode_arg;
5517	    break;
5518	  case 1:
5519	    folded_arg1 = folded_arg;
5520	    const_arg1 = const_arg;
5521	    break;
5522	  case 2:
5523	    const_arg2 = const_arg;
5524	    break;
5525	  }
5526
5527	/* Pick the least expensive of the folded argument and an
5528	   equivalent constant argument.  */
5529	if (const_arg == 0 || const_arg == folded_arg
5530	    || COST (const_arg) > COST (folded_arg))
5531	  cheap_arg = folded_arg, expensive_arg = const_arg;
5532	else
5533	  cheap_arg = const_arg, expensive_arg = folded_arg;
5534
5535	/* Try to replace the operand with the cheapest of the two
5536	   possibilities.  If it doesn't work and this is either of the first
5537	   two operands of a commutative operation, try swapping them.
5538	   If THAT fails, try the more expensive one, provided it is cheaper
5539	   than what is already there.  */
5540
5541	if (cheap_arg == XEXP (x, i))
5542	  continue;
5543
5544	if (insn == 0 && ! copied)
5545	  {
5546	    x = copy_rtx (x);
5547	    copied = 1;
5548	  }
5549
5550	replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5551	for (j = 0;
5552	     j < 2 && replacements[j]
5553	     && COST (replacements[j]) < COST (XEXP (x, i));
5554	     j++)
5555	  {
5556	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5557	      break;
5558
5559	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5560	      {
5561		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5562		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5563
5564		if (apply_change_group ())
5565		  {
5566		    /* Swap them back to be invalid so that this loop can
5567		       continue and flag them to be swapped back later.  */
5568		    rtx tem;
5569
5570		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5571		    XEXP (x, 1) = tem;
5572		    must_swap = 1;
5573		    break;
5574		  }
5575	      }
5576	  }
5577      }
5578
5579    else
5580      {
5581	if (fmt[i] == 'E')
5582	  /* Don't try to fold inside of a vector of expressions.
5583	     Doing nothing is harmless.  */
5584	  {;}
5585      }
5586
5587  /* If a commutative operation, place a constant integer as the second
5588     operand unless the first operand is also a constant integer.  Otherwise,
5589     place any constant second unless the first operand is also a constant.  */
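  /* Thus a hypothetical (plus:SI (const_int 4) (reg:SI 65)) is rewritten
     here as (plus:SI (reg:SI 65) (const_int 4)), the canonical order.  */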
5590
5591  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5592    {
5593      if (must_swap || (const_arg0
5594	  		&& (const_arg1 == 0
5595	      		    || (GET_CODE (const_arg0) == CONST_INT
5596			        && GET_CODE (const_arg1) != CONST_INT))))
5597	{
5598	  register rtx tem = XEXP (x, 0);
5599
5600	  if (insn == 0 && ! copied)
5601	    {
5602	      x = copy_rtx (x);
5603	      copied = 1;
5604	    }
5605
5606	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5607	  validate_change (insn, &XEXP (x, 1), tem, 1);
5608	  if (apply_change_group ())
5609	    {
5610	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5611	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5612	    }
5613	}
5614    }
5615
5616  /* If X is an arithmetic operation, see if we can simplify it.  */
5617
5618  switch (GET_RTX_CLASS (code))
5619    {
5620    case '1':
5621      {
5622	int is_const = 0;
5623
5624	/* We can't simplify extension ops unless we know the
5625	   original mode.  */
5626	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5627	    && mode_arg0 == VOIDmode)
5628	  break;
5629
5630	/* If we had a CONST, strip it off and put it back later if we
5631	   fold.  */
5632	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5633	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5634
5635	new = simplify_unary_operation (code, mode,
5636					const_arg0 ? const_arg0 : folded_arg0,
5637					mode_arg0);
5638	if (new != 0 && is_const)
5639	  new = gen_rtx_CONST (mode, new);
5640      }
5641      break;
5642
5643    case '<':
5644      /* See what items are actually being compared and set FOLDED_ARG[01]
5645	 to those values and CODE to the actual comparison code.  If any are
5646	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
5647	 do anything if both operands are already known to be constant.  */
5648
5649      if (const_arg0 == 0 || const_arg1 == 0)
5650	{
5651	  struct table_elt *p0, *p1;
5652	  rtx true = const_true_rtx, false = const0_rtx;
5653	  enum machine_mode mode_arg1;
5654
5655#ifdef FLOAT_STORE_FLAG_VALUE
5656	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5657	    {
5658	      true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5659						   mode);
5660	      false = CONST0_RTX (mode);
5661	    }
5662#endif
5663
5664	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5665				       &mode_arg0, &mode_arg1);
5666	  const_arg0 = equiv_constant (folded_arg0);
5667	  const_arg1 = equiv_constant (folded_arg1);
5668
5669	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5670	     what kinds of things are being compared, so we can't do
5671	     anything with this comparison.  */
5672
5673	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5674	    break;
5675
5676	  /* If we do not now have two constants being compared, see
5677	     if we can nevertheless deduce some things about the
5678	     comparison.  */
5679	  if (const_arg0 == 0 || const_arg1 == 0)
5680	    {
5681	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or
5682		 non-explicit constant?  These aren't zero, but we
5683		 don't know their sign.  */
5684	      if (const_arg1 == const0_rtx
5685		  && (NONZERO_BASE_PLUS_P (folded_arg0)
5686#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5687	  come out as 0.  */
5688		      || GET_CODE (folded_arg0) == SYMBOL_REF
5689#endif
5690		      || GET_CODE (folded_arg0) == LABEL_REF
5691		      || GET_CODE (folded_arg0) == CONST))
5692		{
5693		  if (code == EQ)
5694		    return false;
5695		  else if (code == NE)
5696		    return true;
5697		}
5698
5699	      /* See if the two operands are the same.  We don't do this
5700		 for IEEE floating-point since we can't assume x == x
5701		 because x might be a NaN.  */
5702
5703	      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5704		   || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5705		  && (folded_arg0 == folded_arg1
5706		      || (GET_CODE (folded_arg0) == REG
5707			  && GET_CODE (folded_arg1) == REG
5708			  && (REG_QTY (REGNO (folded_arg0))
5709			      == REG_QTY (REGNO (folded_arg1))))
5710		      || ((p0 = lookup (folded_arg0,
5711					(safe_hash (folded_arg0, mode_arg0)
5712					 % NBUCKETS), mode_arg0))
5713			  && (p1 = lookup (folded_arg1,
5714					   (safe_hash (folded_arg1, mode_arg0)
5715					    % NBUCKETS), mode_arg0))
5716			  && p0->first_same_value == p1->first_same_value)))
5717		return ((code == EQ || code == LE || code == GE
5718			 || code == LEU || code == GEU)
5719			? true : false);
5720
5721	      /* If FOLDED_ARG0 is a register, see if the comparison we are
5722		 doing now is either the same as we did before or the reverse
5723		 (we only check the reverse if not floating-point).  */
5724	      else if (GET_CODE (folded_arg0) == REG)
5725		{
5726		  int qty = REG_QTY (REGNO (folded_arg0));
5727
5728		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5729		      && (comparison_dominates_p (qty_comparison_code[qty], code)
5730			  || (comparison_dominates_p (qty_comparison_code[qty],
5731						      reverse_condition (code))
5732			      && ! FLOAT_MODE_P (mode_arg0)))
5733		      && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5734			  || (const_arg1
5735			      && rtx_equal_p (qty_comparison_const[qty],
5736					      const_arg1))
5737			  || (GET_CODE (folded_arg1) == REG
5738			      && (REG_QTY (REGNO (folded_arg1))
5739				  == qty_comparison_qty[qty]))))
5740		    return (comparison_dominates_p (qty_comparison_code[qty],
5741						    code)
5742			    ? true : false);
5743		}
5744	    }
5745	}
5746
5747      /* If we are comparing against zero, see if the first operand is
5748	 equivalent to an IOR with a constant.  If so, we may be able to
5749	 determine the result of this comparison.  */
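      /* Illustration (register number invented): if FOLDED_ARG0 is known
	 equivalent to (ior:SI (reg:SI 65) (const_int 1)), it cannot be
	 zero, so EQ folds to false and NE to true; had the IOR constant
	 had its sign bit set, LT and LE would fold to true as well.  */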
5750
5751      if (const_arg1 == const0_rtx)
5752	{
5753	  rtx y = lookup_as_function (folded_arg0, IOR);
5754	  rtx inner_const;
5755
5756	  if (y != 0
5757	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5758	      && GET_CODE (inner_const) == CONST_INT
5759	      && INTVAL (inner_const) != 0)
5760	    {
5761	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5762	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
5763			      && (INTVAL (inner_const)
5764				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5765	      rtx true = const_true_rtx, false = const0_rtx;
5766
5767#ifdef FLOAT_STORE_FLAG_VALUE
5768	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5769		{
5770		  true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5771						       mode);
5772		  false = CONST0_RTX (mode);
5773		}
5774#endif
5775
5776	      switch (code)
5777		{
5778		case EQ:
5779		  return false;
5780		case NE:
5781		  return true;
5782		case LT:  case LE:
5783		  if (has_sign)
5784		    return true;
5785		  break;
5786		case GT:  case GE:
5787		  if (has_sign)
5788		    return false;
5789		  break;
5790		default:
5791		  break;
5792		}
5793	    }
5794	}
5795
5796      new = simplify_relational_operation (code, mode_arg0,
5797					   const_arg0 ? const_arg0 : folded_arg0,
5798					   const_arg1 ? const_arg1 : folded_arg1);
5799#ifdef FLOAT_STORE_FLAG_VALUE
5800      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5801	new = ((new == const0_rtx) ? CONST0_RTX (mode)
5802	       : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5803#endif
5804      break;
5805
5806    case '2':
5807    case 'c':
5808      switch (code)
5809	{
5810	case PLUS:
5811	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
5812	     with that LABEL_REF as its second operand.  If so, the result is
5813	     the first operand of that MINUS.  This handles switches with an
5814	     ADDR_DIFF_VEC table.  */
5815	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5816	    {
5817	      rtx y
5818		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
5819		  : lookup_as_function (folded_arg0, MINUS);
5820
5821	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5822		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5823		return XEXP (y, 0);
5824
5825	      /* Now try for a CONST of a MINUS like the above.  */
5826	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5827			: lookup_as_function (folded_arg0, CONST))) != 0
5828		  && GET_CODE (XEXP (y, 0)) == MINUS
5829		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5830		  && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
5831		return XEXP (XEXP (y, 0), 0);
5832	    }
5833
5834	  /* Likewise if the operands are in the other order.  */
5835	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5836	    {
5837	      rtx y
5838		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
5839		  : lookup_as_function (folded_arg1, MINUS);
5840
5841	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5842		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5843		return XEXP (y, 0);
5844
5845	      /* Now try for a CONST of a MINUS like the above.  */
5846	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5847			: lookup_as_function (folded_arg1, CONST))) != 0
5848		  && GET_CODE (XEXP (y, 0)) == MINUS
5849		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5850		  && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
5851		return XEXP (XEXP (y, 0), 0);
5852	    }
5853
5854	  /* If second operand is a register equivalent to a negative
5855	     CONST_INT, see if we can find a register equivalent to the
5856	     positive constant.  Make a MINUS if so.  Don't do this for
5857	     a non-negative constant since we might then alternate between
5858	     choosing positive and negative constants.  Having the positive
5859	     constant previously-used is the more common case.  Be sure
5860	     the resulting constant is non-negative; if const_arg1 were
5861	     the smallest negative number this would overflow: depending
5862	     on the mode, this would either just be the same value (and
5863	     hence not save anything) or be incorrect.  */
5864	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5865	      && INTVAL (const_arg1) < 0
5866	      /* This used to test
5867
5868	         - INTVAL (const_arg1) >= 0
5869
5870		 But the Sun V5.0 compilers miscompiled that test.  So
5871		 instead we test for the problematic value in a more direct
5872		 manner and hope the Sun compilers get it correct.  */
5873	      && INTVAL (const_arg1) !=
5874	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
5875	      && GET_CODE (folded_arg1) == REG)
5876	    {
5877	      rtx new_const = GEN_INT (- INTVAL (const_arg1));
5878	      struct table_elt *p
5879		= lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5880			  mode);
5881
5882	      if (p)
5883		for (p = p->first_same_value; p; p = p->next_same_value)
5884		  if (GET_CODE (p->exp) == REG)
5885		    return cse_gen_binary (MINUS, mode, folded_arg0,
5886					   canon_reg (p->exp, NULL_RTX));
5887	    }
5888	  goto from_plus;
5889
5890	case MINUS:
5891	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5892	     If so, produce (PLUS Z C2-C).  */
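	  /* E.g., (minus:SI (reg:SI 65) (const_int 4)), where (reg:SI 65)
	     is known to be (plus:SI (reg:SI 66) (const_int 12)), folds to
	     (plus:SI (reg:SI 66) (const_int 8)) (register numbers
	     invented).  */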
5893	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5894	    {
5895	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5896	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5897		return fold_rtx (plus_constant (copy_rtx (y),
5898						-INTVAL (const_arg1)),
5899				 NULL_RTX);
5900	    }
5901
5902	  /* ... fall through ...  */
5903
5904	from_plus:
5905	case SMIN:    case SMAX:      case UMIN:    case UMAX:
5906	case IOR:     case AND:       case XOR:
5907	case MULT:    case DIV:       case UDIV:
5908	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
5909	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5910	     is known to be of similar form, we may be able to replace the
5911	     operation with a combined operation.  This may eliminate the
5912	     intermediate operation if every use is simplified in this way.
5913	     Note that the similar optimization done by combine.c only works
5914	     if the intermediate operation's result has only one reference.  */
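	  /* A worked example (registers invented): given
	     (plus:SI (reg:SI 65) (const_int 3)) where (reg:SI 65) is known
	     to be (plus:SI (reg:SI 66) (const_int 4)), ASSOCIATE_CODE is
	     PLUS, NEW_CONST becomes (const_int 7), and the result is
	     (plus:SI (reg:SI 66) (const_int 7)).  */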
5915
5916	  if (GET_CODE (folded_arg0) == REG
5917	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5918	    {
5919	      int is_shift
5920		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5921	      rtx y = lookup_as_function (folded_arg0, code);
5922	      rtx inner_const;
5923	      enum rtx_code associate_code;
5924	      rtx new_const;
5925
5926	      if (y == 0
5927		  || 0 == (inner_const
5928			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5929		  || GET_CODE (inner_const) != CONST_INT
5930		  /* If we have compiled a statement like
5931		     "if (x == (x & mask1))", and now are looking at
5932		     "x & mask2", we will have a case where the first operand
5933		     of Y is the same as our first operand.  Unless we detect
5934		     this case, an infinite loop will result.  */
5935		  || XEXP (y, 0) == folded_arg0)
5936		break;
5937
5938	      /* Don't associate these operations if they are a PLUS with the
5939		 same constant and it is a power of two.  These might be doable
5940		 with a pre- or post-increment.  Similarly for two subtracts of
5941		 identical powers of two with post decrement.  */
5942
5943	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5944		  && ((HAVE_PRE_INCREMENT
5945			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5946		      || (HAVE_POST_INCREMENT
5947			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5948		      || (HAVE_PRE_DECREMENT
5949			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
5950		      || (HAVE_POST_DECREMENT
5951			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5952		break;
5953
5954	      /* Compute the code used to compose the constants.  For example,
5955		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
5956
5957	      associate_code
5958		= (code == MULT || code == DIV || code == UDIV ? MULT
5959		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5960
5961	      new_const = simplify_binary_operation (associate_code, mode,
5962						     const_arg1, inner_const);
5963
5964	      if (new_const == 0)
5965		break;
5966
5967	      /* If we are associating shift operations, don't let this
5968		 produce a shift of the size of the object or larger.
5969		 This could occur when we follow a sign-extend by a right
5970		 shift on a machine that does a sign-extend as a pair
5971		 of shifts.  */
5972
5973	      if (is_shift && GET_CODE (new_const) == CONST_INT
5974		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5975		{
5976		  /* As an exception, we can turn an ASHIFTRT of this
5977		     form into a shift of the number of bits - 1.  */
5978		  if (code == ASHIFTRT)
5979		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5980		  else
5981		    break;
5982		}
5983
5984	      y = copy_rtx (XEXP (y, 0));
5985
5986	      /* If Y contains our first operand (the most common way this
5987		 can happen is if Y is a MEM), we would go into an infinite
5988		 loop if we tried to fold it.  So don't in that case.  */
5989
5990	      if (! reg_mentioned_p (folded_arg0, y))
5991		y = fold_rtx (y, insn);
5992
5993	      return cse_gen_binary (code, mode, y, new_const);
5994	    }
5995	  break;
5996
5997	default:
5998	  break;
5999	}
6000
6001      new = simplify_binary_operation (code, mode,
6002				       const_arg0 ? const_arg0 : folded_arg0,
6003				       const_arg1 ? const_arg1 : folded_arg1);
6004      break;
6005
6006    case 'o':
6007      /* (lo_sum (high X) X) is simply X.  */
6008      if (code == LO_SUM && const_arg0 != 0
6009	  && GET_CODE (const_arg0) == HIGH
6010	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
6011	return const_arg1;
6012      break;
6013
6014    case '3':
6015    case 'b':
6016      new = simplify_ternary_operation (code, mode, mode_arg0,
6017					const_arg0 ? const_arg0 : folded_arg0,
6018					const_arg1 ? const_arg1 : folded_arg1,
6019					const_arg2 ? const_arg2 : XEXP (x, 2));
6020      break;
6021
6022    case 'x':
6023      /* Always eliminate CONSTANT_P_RTX at this stage.  */
6024      if (code == CONSTANT_P_RTX)
6025	return (const_arg0 ? const1_rtx : const0_rtx);
6026      break;
6027    }
6028
6029  return new ? new : x;
6030}
6031
6032/* Return a constant value currently equivalent to X.
6033   Return 0 if we don't know one.  */
6034
6035static rtx
6036equiv_constant (x)
6037     rtx x;
6038{
6039  if (GET_CODE (x) == REG
6040      && REGNO_QTY_VALID_P (REGNO (x))
6041      && qty_const[REG_QTY (REGNO (x))])
6042    x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
6043
6044  if (x == 0 || CONSTANT_P (x))
6045    return x;
6046
6047  /* If X is a MEM, try to fold it outside the context of any insn to see if
6048     it might be equivalent to a constant.  That handles the case where it
6049     is a constant-pool reference.  Then try to look it up in the hash table
6050     in case it is something whose value we have seen before.  */
6051
6052  if (GET_CODE (x) == MEM)
6053    {
6054      struct table_elt *elt;
6055
6056      x = fold_rtx (x, NULL_RTX);
6057      if (CONSTANT_P (x))
6058	return x;
6059
6060      elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6061      if (elt == 0)
6062	return 0;
6063
6064      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6065	if (elt->is_const && CONSTANT_P (elt->exp))
6066	  return elt->exp;
6067    }
6068
6069  return 0;
6070}
6071
6072/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6073   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6074   least-significant part of X.
6075   MODE specifies how big a part of X to return.
6076
6077   If the requested operation cannot be done, 0 is returned.
6078
6079   This is similar to gen_lowpart in emit-rtl.c.  */
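/* Schematically, on a 32-bit little-endian target the low QImode part of
   (mem:SI A) is (mem:QI A); with big-endian bytes it is
   (mem:QI (plus A (const_int 3))), so that the address just past the
   data is unchanged.  */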
6080
6081rtx
6082gen_lowpart_if_possible (mode, x)
6083     enum machine_mode mode;
6084     register rtx x;
6085{
6086  rtx result = gen_lowpart_common (mode, x);
6087
6088  if (result)
6089    return result;
6090  else if (GET_CODE (x) == MEM)
6091    {
6092      /* This is the only other case we handle.  */
6093      register int offset = 0;
6094      rtx new;
6095
6096      if (WORDS_BIG_ENDIAN)
6097	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6098		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6099      if (BYTES_BIG_ENDIAN)
6100	/* Adjust the address so that the address-after-the-data is
6101	   unchanged.  */
6102	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6103		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6104      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
6105      if (! memory_address_p (mode, XEXP (new, 0)))
6106	return 0;
6107      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6108      MEM_COPY_ATTRIBUTES (new, x);
6109      return new;
6110    }
6111  else
6112    return 0;
6113}
6114
6115/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6116   branch.  It will be zero if not.
6117
6118   In certain cases, this can cause us to add an equivalence.  For example,
6119   if we are following the taken case of
6120   	if (i == 2)
6121   we can add the fact that `i' and `2' are now equivalent.
6122
6123   In any case, we can record that this comparison was passed.  If the same
6124   comparison is seen later, we will know its value.  */
6125
6126static void
6127record_jump_equiv (insn, taken)
6128     rtx insn;
6129     int taken;
6130{
6131  int cond_known_true;
6132  rtx op0, op1;
6133  enum machine_mode mode, mode0, mode1;
6134  int reversed_nonequality = 0;
6135  enum rtx_code code;
6136
6137  /* Ensure this is the right kind of insn.  */
6138  if (! condjump_p (insn) || simplejump_p (insn))
6139    return;
6140
6141  /* See if this jump condition is known true or false.  */
6142  if (taken)
6143    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6144  else
6145    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6146
6147  /* Get the type of comparison being done and the operands being compared.
6148     If we had to reverse a non-equality condition, record that fact so we
6149     know that it isn't valid for floating-point.  */
6150  code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6151  op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6152  op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6153
6154  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
6155  if (! cond_known_true)
6156    {
6157      reversed_nonequality = (code != EQ && code != NE);
6158      code = reverse_condition (code);
6159    }
6160
6161  /* The mode is the mode of the non-constant.  */
6162  mode = mode0;
6163  if (mode1 != VOIDmode)
6164    mode = mode1;
6165
6166  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6167}
6168
6169/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6170   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6171   Make any useful entries we can with that information.  Called from
6172   above function and called recursively.  */
6173
6174static void
6175record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6176     enum rtx_code code;
6177     enum machine_mode mode;
6178     rtx op0, op1;
6179     int reversed_nonequality;
6180{
6181  unsigned op0_hash, op1_hash;
6182  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6183  struct table_elt *op0_elt, *op1_elt;
6184
6185  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6186     we know that they are also equal in the smaller mode (this is also
6187     true for all smaller modes whether or not there is a SUBREG, but
6188     is not worth testing for with no SUBREG).  */
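  /* Illustration (registers invented): from
     (eq (subreg:DI (reg:SI 65) 0) (reg:DI 66)) we can also record
     (reg:SI 65) == (subreg:SI (reg:DI 66) 0), the same equality in the
     narrower SImode.  */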
6189
6190  /* Note that GET_MODE (op0) may not equal MODE.  */
6191  if (code == EQ && GET_CODE (op0) == SUBREG
6192      && (GET_MODE_SIZE (GET_MODE (op0))
6193	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6194    {
6195      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6196      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6197
6198      record_jump_cond (code, mode, SUBREG_REG (op0),
6199			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6200			reversed_nonequality);
6201    }
6202
6203  if (code == EQ && GET_CODE (op1) == SUBREG
6204      && (GET_MODE_SIZE (GET_MODE (op1))
6205	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6206    {
6207      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6208      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6209
6210      record_jump_cond (code, mode, SUBREG_REG (op1),
6211			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6212			reversed_nonequality);
6213    }
6214
6215  /* Similarly, if this is an NE comparison, and either is a SUBREG
6216     making a smaller mode, we know the whole thing is also NE.  */
6217
6218  /* Note that GET_MODE (op0) may not equal MODE;
6219     if we test MODE instead, we can get an infinite recursion
6220     alternating between two modes each wider than MODE.  */
6221
6222  if (code == NE && GET_CODE (op0) == SUBREG
6223      && subreg_lowpart_p (op0)
6224      && (GET_MODE_SIZE (GET_MODE (op0))
6225	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6226    {
6227      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6228      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6229
6230      record_jump_cond (code, mode, SUBREG_REG (op0),
6231			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6232			reversed_nonequality);
6233    }
6234
6235  if (code == NE && GET_CODE (op1) == SUBREG
6236      && subreg_lowpart_p (op1)
6237      && (GET_MODE_SIZE (GET_MODE (op1))
6238	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6239    {
6240      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6241      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6242
6243      record_jump_cond (code, mode, SUBREG_REG (op1),
6244			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6245			reversed_nonequality);
6246    }
6247
6248  /* Hash both operands.  */
6249
6250  do_not_record = 0;
6251  hash_arg_in_memory = 0;
6252  hash_arg_in_struct = 0;
6253  op0_hash = HASH (op0, mode);
6254  op0_in_memory = hash_arg_in_memory;
6255  op0_in_struct = hash_arg_in_struct;
6256
6257  if (do_not_record)
6258    return;
6259
6260  do_not_record = 0;
6261  hash_arg_in_memory = 0;
6262  hash_arg_in_struct = 0;
6263  op1_hash = HASH (op1, mode);
6264  op1_in_memory = hash_arg_in_memory;
6265  op1_in_struct = hash_arg_in_struct;
6266
6267  if (do_not_record)
6268    return;
6269
6270  /* Look up both operands.  */
6271  op0_elt = lookup (op0, op0_hash, mode);
6272  op1_elt = lookup (op1, op1_hash, mode);
6273
6274  /* If both operands are already equivalent or if they are not in the
6275     table but are identical, do nothing.  */
6276  if ((op0_elt != 0 && op1_elt != 0
6277       && op0_elt->first_same_value == op1_elt->first_same_value)
6278      || op0 == op1 || rtx_equal_p (op0, op1))
6279    return;
6280
6281  /* If we aren't setting two things equal, all we can do is save this
6282     comparison.  Similarly if this is floating-point.  In the latter
6283     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6284     If we record the equality, we might inadvertently delete code
6285     whose intent was to change -0 to +0.  */
6286
6287  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6288    {
6289      /* If we reversed a floating-point comparison, if OP0 is not a
6290	 register, or if OP1 is neither a register nor a constant, we can't
6291	 do anything.  */
6292
6293      if (GET_CODE (op1) != REG)
6294	op1 = equiv_constant (op1);
6295
6296      if ((reversed_nonequality && FLOAT_MODE_P (mode))
6297	  || GET_CODE (op0) != REG || op1 == 0)
6298	return;
6299
6300      /* Put OP0 in the hash table if it isn't already.  This gives it a
6301	 new quantity number.  */
6302      if (op0_elt == 0)
6303	{
6304	  if (insert_regs (op0, NULL_PTR, 0))
6305	    {
6306	      rehash_using_reg (op0);
6307	      op0_hash = HASH (op0, mode);
6308
6309	      /* If OP0 is contained in OP1, this changes its hash code
6310		 as well.  Faster to rehash than to check, except
6311		 for the simple case of a constant.  */
6312	      if (! CONSTANT_P (op1))
6313		op1_hash = HASH (op1, mode);
6314	    }
6315
6316	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6317	  op0_elt->in_memory = op0_in_memory;
6318	  op0_elt->in_struct = op0_in_struct;
6319	}
6320
6321      qty_comparison_code[REG_QTY (REGNO (op0))] = code;
6322      if (GET_CODE (op1) == REG)
6323	{
6324	  /* Look it up again--in case op0 and op1 are the same.  */
6325	  op1_elt = lookup (op1, op1_hash, mode);
6326
6327	  /* Put OP1 in the hash table so it gets a new quantity number.  */
6328	  if (op1_elt == 0)
6329	    {
6330	      if (insert_regs (op1, NULL_PTR, 0))
6331		{
6332		  rehash_using_reg (op1);
6333		  op1_hash = HASH (op1, mode);
6334		}
6335
6336	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6337	      op1_elt->in_memory = op1_in_memory;
6338	      op1_elt->in_struct = op1_in_struct;
6339	    }
6340
6341	  qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6342	  qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
6343	}
6344      else
6345	{
6346	  qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6347	  qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
6348	}
6349
6350      return;
6351    }
6352
6353  /* If either side is still missing an equivalence, make it now,
6354     then merge the equivalences.  */
6355
6356  if (op0_elt == 0)
6357    {
6358      if (insert_regs (op0, NULL_PTR, 0))
6359	{
6360	  rehash_using_reg (op0);
6361	  op0_hash = HASH (op0, mode);
6362	}
6363
6364      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6365      op0_elt->in_memory = op0_in_memory;
6366      op0_elt->in_struct = op0_in_struct;
6367    }
6368
6369  if (op1_elt == 0)
6370    {
6371      if (insert_regs (op1, NULL_PTR, 0))
6372	{
6373	  rehash_using_reg (op1);
6374	  op1_hash = HASH (op1, mode);
6375	}
6376
6377      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6378      op1_elt->in_memory = op1_in_memory;
6379      op1_elt->in_struct = op1_in_struct;
6380    }
6381
6382  merge_equiv_classes (op0_elt, op1_elt);
6383  last_jump_equiv_class = op0_elt;
6384}
6385
6386/* CSE processing for one instruction.
6387   First simplify sources and addresses of all assignments
6388   in the instruction, using previously-computed equivalent values.
6389   Then install the new sources and destinations in the table
6390   of available values.
6391
6392   If LIBCALL_INSN is nonzero, don't record any equivalence made in
6393   the insn.  It means that INSN is inside a libcall block.  In this
6394   case LIBCALL_INSN is the corresponding insn with the REG_LIBCALL note.  */
6395
6396/* Data on one SET contained in the instruction.  */
6397
6398struct set
6399{
6400  /* The SET rtx itself.  */
6401  rtx rtl;
6402  /* The SET_SRC of the rtx (the original value, if it is changing).  */
6403  rtx src;
6404  /* The hash-table element for the SET_SRC of the SET.  */
6405  struct table_elt *src_elt;
6406  /* Hash value for the SET_SRC.  */
6407  unsigned src_hash;
6408  /* Hash value for the SET_DEST.  */
6409  unsigned dest_hash;
6410  /* The SET_DEST, with SUBREG, etc., stripped.  */
6411  rtx inner_dest;
6412  /* Place where the pointer to the INNER_DEST was found.  */
6413  rtx *inner_dest_loc;
6414  /* Nonzero if the SET_SRC is in memory.  */
6415  char src_in_memory;
6416  /* Nonzero if the SET_SRC is in a structure.  */
6417  char src_in_struct;
6418  /* Nonzero if the SET_SRC contains something
6419     whose value cannot be predicted and understood.  */
6420  char src_volatile;
6421  /* Original machine mode, in case it becomes a CONST_INT.  */
6422  enum machine_mode mode;
6423  /* A constant equivalent for SET_SRC, if any.  */
6424  rtx src_const;
6425  /* Hash value of constant equivalent for SET_SRC.  */
6426  unsigned src_const_hash;
6427  /* Table entry for constant equivalent for SET_SRC, if any.  */
6428  struct table_elt *src_const_elt;
6429};
6430
6431static void
6432cse_insn (insn, libcall_insn)
6433     rtx insn;
6434     rtx libcall_insn;
6435{
6436  register rtx x = PATTERN (insn);
6437  register int i;
6438  rtx tem;
6439  register int n_sets = 0;
6440
6441#ifdef HAVE_cc0
6442  /* Records what this insn does to set CC0.  */
6443  rtx this_insn_cc0 = 0;
6444  enum machine_mode this_insn_cc0_mode = VOIDmode;
6445#endif
6446
6447  rtx src_eqv = 0;
6448  struct table_elt *src_eqv_elt = 0;
6449  int src_eqv_volatile;
6450  int src_eqv_in_memory;
6451  int src_eqv_in_struct;
6452  unsigned src_eqv_hash;
6453
6454  struct set *sets;
6455
6456  this_insn = insn;
6457
6458  /* Find all the SETs and CLOBBERs in this instruction.
6459     Record all the SETs in the array `set' and count them.
6460     Also determine whether there is a CLOBBER that invalidates
6461     all memory references, or all references at varying addresses.  */
6462
6463  if (GET_CODE (insn) == CALL_INSN)
6464    {
6465      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6466	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6467          invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6468    }
6469
6470  if (GET_CODE (x) == SET)
6471    {
6472      sets = (struct set *) alloca (sizeof (struct set));
6473      sets[0].rtl = x;
6474
6475      /* Ignore SETs that are unconditional jumps.
6476	 They never need cse processing, so this does not hurt.
6477	 The reason is not efficiency but rather
6478	 so that we can test at the end for instructions
6479	 that have been simplified to unconditional jumps
6480	 and not be misled by unchanged instructions
6481	 that were unconditional jumps to begin with.  */
6482      if (SET_DEST (x) == pc_rtx
6483	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
6484	;
6485
6486      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6487	 The hard function value register is used only once, to copy to
6488	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6489	 Ensure we invalidate the destination register.  On the 80386 no
6490	 other code would invalidate it since it is a fixed_reg.
6491	 We need not check the return of apply_change_group; see canon_reg.  */
6492
6493      else if (GET_CODE (SET_SRC (x)) == CALL)
6494	{
6495	  canon_reg (SET_SRC (x), insn);
6496	  apply_change_group ();
6497	  fold_rtx (SET_SRC (x), insn);
6498	  invalidate (SET_DEST (x), VOIDmode);
6499	}
6500      else
6501	n_sets = 1;
6502    }
6503  else if (GET_CODE (x) == PARALLEL)
6504    {
6505      register int lim = XVECLEN (x, 0);
6506
6507      sets = (struct set *) alloca (lim * sizeof (struct set));
6508
6509      /* Find all regs explicitly clobbered in this insn,
6510	 and ensure they are not replaced with any other regs
6511	 elsewhere in this insn.
6512	 When a reg that is clobbered is also used for input,
6513	 we should presume it is that way for a reason,
6514	 and we should not substitute some other register
6515	 which is not supposed to be clobbered.
6516	 Therefore, this loop cannot be merged into the one below
6517	 because a CALL may precede a CLOBBER and refer to the
6518	 value clobbered.  We must not let a canonicalization do
6519	 anything in that case.  */
6520      for (i = 0; i < lim; i++)
6521	{
6522	  register rtx y = XVECEXP (x, 0, i);
6523	  if (GET_CODE (y) == CLOBBER)
6524	    {
6525	      rtx clobbered = XEXP (y, 0);
6526
6527	      if (GET_CODE (clobbered) == REG
6528		  || GET_CODE (clobbered) == SUBREG)
6529		invalidate (clobbered, VOIDmode);
6530	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6531		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6532		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6533	    }
6534	}
6535
6536      for (i = 0; i < lim; i++)
6537	{
6538	  register rtx y = XVECEXP (x, 0, i);
6539	  if (GET_CODE (y) == SET)
6540	    {
6541	      /* As above, we ignore unconditional jumps and call-insns and
6542		 ignore the result of apply_change_group.  */
6543	      if (GET_CODE (SET_SRC (y)) == CALL)
6544		{
6545		  canon_reg (SET_SRC (y), insn);
6546		  apply_change_group ();
6547		  fold_rtx (SET_SRC (y), insn);
6548		  invalidate (SET_DEST (y), VOIDmode);
6549		}
6550	      else if (SET_DEST (y) == pc_rtx
6551		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
6552		;
6553	      else
6554		sets[n_sets++].rtl = y;
6555	    }
6556	  else if (GET_CODE (y) == CLOBBER)
6557	    {
6558	      /* If we clobber memory, canon the address.
6559		 This does nothing when a register is clobbered
6560		 because we have already invalidated the reg.  */
6561	      if (GET_CODE (XEXP (y, 0)) == MEM)
6562		canon_reg (XEXP (y, 0), NULL_RTX);
6563	    }
6564	  else if (GET_CODE (y) == USE
6565		   && ! (GET_CODE (XEXP (y, 0)) == REG
6566			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6567	    canon_reg (y, NULL_RTX);
6568	  else if (GET_CODE (y) == CALL)
6569	    {
6570	      /* The result of apply_change_group can be ignored; see
6571		 canon_reg.  */
6572	      canon_reg (y, insn);
6573	      apply_change_group ();
6574	      fold_rtx (y, insn);
6575	    }
6576	}
6577    }
6578  else if (GET_CODE (x) == CLOBBER)
6579    {
6580      if (GET_CODE (XEXP (x, 0)) == MEM)
6581	canon_reg (XEXP (x, 0), NULL_RTX);
6582    }
6583
6584  /* Canonicalize a USE of a pseudo register or memory location.  */
6585  else if (GET_CODE (x) == USE
6586	   && ! (GET_CODE (XEXP (x, 0)) == REG
6587		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6588    canon_reg (XEXP (x, 0), NULL_RTX);
6589  else if (GET_CODE (x) == CALL)
6590    {
6591      /* The result of apply_change_group can be ignored; see canon_reg.  */
6592      canon_reg (x, insn);
6593      apply_change_group ();
6594      fold_rtx (x, insn);
6595    }
6596
6597  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6598     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
6599     is handled specially for this case, and if it isn't set, then there will
6600     be no equivalence for the destination.  */
6601  if (n_sets == 1 && REG_NOTES (insn) != 0
6602      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6603      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6604	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6605    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6606
6607  /* Canonicalize sources and addresses of destinations.
6608     We do this in a separate pass to avoid problems when a MATCH_DUP is
6609     present in the insn pattern.  In that case, we want to ensure that
6610     we don't break the duplicate nature of the pattern.  So we will replace
6611     both operands at the same time.  Otherwise, we would fail to find an
6612     equivalent substitution in the loop calling validate_change below.
6613
6614     We used to suppress canonicalization of DEST if it appears in SRC,
6615     but we don't do this any more.  */
6616
6617  for (i = 0; i < n_sets; i++)
6618    {
6619      rtx dest = SET_DEST (sets[i].rtl);
6620      rtx src = SET_SRC (sets[i].rtl);
6621      rtx new = canon_reg (src, insn);
6622      int insn_code;
6623
6624      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6625	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6626	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6627	  || (insn_code = recog_memoized (insn)) < 0
6628	  || insn_n_dups[insn_code] > 0)
6629	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6630      else
6631	SET_SRC (sets[i].rtl) = new;
6632
6633      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6634	{
6635	  validate_change (insn, &XEXP (dest, 1),
6636			   canon_reg (XEXP (dest, 1), insn), 1);
6637	  validate_change (insn, &XEXP (dest, 2),
6638			   canon_reg (XEXP (dest, 2), insn), 1);
6639	}
6640
6641      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6642	     || GET_CODE (dest) == ZERO_EXTRACT
6643	     || GET_CODE (dest) == SIGN_EXTRACT)
6644	dest = XEXP (dest, 0);
6645
6646      if (GET_CODE (dest) == MEM)
6647	canon_reg (dest, insn);
6648    }
6649
6650  /* Now that we have done all the replacements, we can apply the change
6651     group and see if they all work.  Note that this will cause some
6652     canonicalizations that would have worked individually not to be applied
6653     because some other canonicalization didn't work, but this should not
6654     occur often.
6655
6656     The result of apply_change_group can be ignored; see canon_reg.  */
6657
6658  apply_change_group ();
6659
6660  /* Set sets[i].src_elt to the class each source belongs to.
6661     Detect assignments from or to volatile things
6662     and set sets[i] to zero so they will be ignored
6663     in the rest of this function.
6664
6665     Nothing in this loop changes the hash table or the register chains.  */
6666
6667  for (i = 0; i < n_sets; i++)
6668    {
6669      register rtx src, dest;
6670      register rtx src_folded;
6671      register struct table_elt *elt = 0, *p;
6672      enum machine_mode mode;
6673      rtx src_eqv_here;
6674      rtx src_const = 0;
6675      rtx src_related = 0;
6676      struct table_elt *src_const_elt = 0;
6677      int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6678      int src_related_cost = 10000, src_elt_cost = 10000;
6679      /* Set non-zero if we need to call force_const_mem on the
6680	 contents of src_folded before using it.  */
6681      int src_folded_force_flag = 0;
6682
6683      dest = SET_DEST (sets[i].rtl);
6684      src = SET_SRC (sets[i].rtl);
6685
6686      /* If SRC is a constant that has no machine mode,
6687	 hash it with the destination's machine mode.
6688	 This way we can keep different modes separate.  */
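      /* E.g., for a hypothetical (set (reg:SI 65) (const_int 5)), the
	 VOIDmode CONST_INT is hashed as if it were SImode.  */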
6689
6690      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6691      sets[i].mode = mode;
6692
6693      if (src_eqv)
6694	{
6695	  enum machine_mode eqvmode = mode;
6696	  if (GET_CODE (dest) == STRICT_LOW_PART)
6697	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6698	  do_not_record = 0;
6699	  hash_arg_in_memory = 0;
6700	  hash_arg_in_struct = 0;
6701	  src_eqv = fold_rtx (src_eqv, insn);
6702	  src_eqv_hash = HASH (src_eqv, eqvmode);
6703
6704	  /* Find the equivalence class for the equivalent expression.  */
6705
6706	  if (!do_not_record)
6707	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6708
6709	  src_eqv_volatile = do_not_record;
6710	  src_eqv_in_memory = hash_arg_in_memory;
6711	  src_eqv_in_struct = hash_arg_in_struct;
6712	}
6713
6714      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6715	 value of the INNER register, not the destination.  So it is not
6716	 a valid substitution for the source.  But save it for later.  */
6717      if (GET_CODE (dest) == STRICT_LOW_PART)
6718	src_eqv_here = 0;
6719      else
6720	src_eqv_here = src_eqv;
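      /* For instance (hypothetical RTL), in

	     (set (strict_low_part (subreg:QI (reg:SI 100) 0)) (reg:QI 101))

	 a REG_EQUAL note describes the resulting value of all of
	 (reg:SI 100), not a QImode value, so it must not be used as a
	 replacement for the QImode source.  */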
6721
6722      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
6723	 simplified result, which may not necessarily be valid.  */
6724      src_folded = fold_rtx (src, insn);
6725
6726#if 0
6727      /* ??? This caused bad code to be generated for the m68k port with -O2.
6728	 Suppose src is (CONST_INT -1), and that after truncation src_folded
6729	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
6730	 At the end we will add src and src_const to the same equivalence
6731	 class.  We now have 3 and -1 on the same equivalence class.  This
6732	 causes later instructions to be mis-optimized.  */
6733      /* If storing a constant in a bitfield, pre-truncate the constant
6734	 so we will be able to record it later.  */
6735      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6736	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6737	{
6738	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6739
6740	  if (GET_CODE (src) == CONST_INT
6741	      && GET_CODE (width) == CONST_INT
6742	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6743	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6744	    src_folded
6745	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6746					  << INTVAL (width)) - 1));
6747	}
6748#endif
6749
6750      /* Compute SRC's hash code, and also notice if it
6751	 should not be recorded at all.  In that case,
6752	 prevent any further processing of this assignment.  */
6753      do_not_record = 0;
6754      hash_arg_in_memory = 0;
6755      hash_arg_in_struct = 0;
6756
6757      sets[i].src = src;
6758      sets[i].src_hash = HASH (src, mode);
6759      sets[i].src_volatile = do_not_record;
6760      sets[i].src_in_memory = hash_arg_in_memory;
6761      sets[i].src_in_struct = hash_arg_in_struct;
6762
6763      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6764	 a pseudo that is set more than once, do not record SRC.  Using
6765	 SRC as a replacement for anything else will be incorrect in that
6766	 situation.  Note that this usually occurs only for stack slots,
6767	 in which case all the RTL would be referring to SRC, so we don't
6768	 lose any optimization opportunities by not having SRC in the
6769	 hash table.  */
6770
6771      if (GET_CODE (src) == MEM
6772	  && find_reg_note (insn, REG_EQUIV, src) != 0
6773	  && GET_CODE (dest) == REG
6774	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6775	  && REG_N_SETS (REGNO (dest)) != 1)
6776	sets[i].src_volatile = 1;
6777
6778#if 0
6779      /* It is no longer clear why we used to do this, but it doesn't
6780	 appear to still be needed.  So let's try without it since this
6781	 code hurts cse'ing widened ops.  */
6782      /* If source is a perverse subreg (such as QI treated as an SI),
6783	 treat it as volatile.  It may do the work of an SI in one context
6784	 where the extra bits are not being used, but cannot replace an SI
6785	 in general.  */
6786      if (GET_CODE (src) == SUBREG
6787	  && (GET_MODE_SIZE (GET_MODE (src))
6788	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6789	sets[i].src_volatile = 1;
6790#endif
6791
6792      /* Locate all possible equivalent forms for SRC.  Try to replace
6793         SRC in the insn with each cheaper equivalent.
6794
6795         We have the following types of equivalents: SRC itself, a folded
6796         version, a value given in a REG_EQUAL note, or a value related
6797	 to a constant.
6798
6799         Each of these equivalents may be part of an additional class
6800         of equivalents (if more than one is in the table, they must be in
6801         the same class; we check for this).
6802
6803	 If the source is volatile, we don't do any table lookups.
6804
6805         We note any constant equivalent for possible later use in a
6806         REG_NOTE.  */
6807
6808      if (!sets[i].src_volatile)
6809	elt = lookup (src, sets[i].src_hash, mode);
6810
6811      sets[i].src_elt = elt;
6812
6813      if (elt && src_eqv_here && src_eqv_elt)
6814        {
6815          if (elt->first_same_value != src_eqv_elt->first_same_value)
6816	    {
6817	      /* The REG_EQUAL is indicating that two formerly distinct
6818		 classes are now equivalent.  So merge them.  */
6819	      merge_equiv_classes (elt, src_eqv_elt);
6820	      src_eqv_hash = HASH (src_eqv, elt->mode);
6821	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6822	    }
6823
6824          src_eqv_here = 0;
6825        }
6826
6827      else if (src_eqv_elt)
6828        elt = src_eqv_elt;
6829
6830      /* Try to find a constant somewhere and record it in `src_const'.
6831	 Record its table element, if any, in `src_const_elt'.  Look in
6832	 any known equivalences first.  (If the constant is not in the
6833	 table, also set `sets[i].src_const_hash').  */
6834      if (elt)
6835        for (p = elt->first_same_value; p; p = p->next_same_value)
6836	  if (p->is_const)
6837	    {
6838	      src_const = p->exp;
6839	      src_const_elt = elt;
6840	      break;
6841	    }
6842
6843      if (src_const == 0
6844	  && (CONSTANT_P (src_folded)
6845	      /* Consider (minus (label_ref L1) (label_ref L2)) as
6846		 "constant" here so we will record it. This allows us
6847		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
6848	      || (GET_CODE (src_folded) == MINUS
6849		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6850		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6851	src_const = src_folded, src_const_elt = elt;
6852      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6853	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6854
6855      /* If we don't know if the constant is in the table, get its
6856	 hash code and look it up.  */
6857      if (src_const && src_const_elt == 0)
6858	{
6859	  sets[i].src_const_hash = HASH (src_const, mode);
6860	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6861	}
6862
6863      sets[i].src_const = src_const;
6864      sets[i].src_const_elt = src_const_elt;
6865
6866      /* If the constant and our source are both in the table, mark them as
6867	 equivalent.  Otherwise, if a constant is in the table but the source
6868	 isn't, set ELT to it.  */
6869      if (src_const_elt && elt
6870	  && src_const_elt->first_same_value != elt->first_same_value)
6871	merge_equiv_classes (elt, src_const_elt);
6872      else if (src_const_elt && elt == 0)
6873	elt = src_const_elt;
6874
6875      /* See if there is a register linearly related to a constant
6876         equivalent of SRC.  */
6877      if (src_const
6878	  && (GET_CODE (src_const) == CONST
6879	      || (src_const_elt && src_const_elt->related_value != 0)))
6880        {
6881          src_related = use_related_value (src_const, src_const_elt);
6882          if (src_related)
6883            {
6884	      struct table_elt *src_related_elt
6885		    = lookup (src_related, HASH (src_related, mode), mode);
6886	      if (src_related_elt && elt)
6887	        {
6888		  if (elt->first_same_value
6889		      != src_related_elt->first_same_value)
6890		    /* This can occur when we previously saw a CONST
6891		       involving a SYMBOL_REF and then see the SYMBOL_REF
6892		       twice.  Merge the involved classes.  */
6893		    merge_equiv_classes (elt, src_related_elt);
6894
6895	          src_related = 0;
6896		  src_related_elt = 0;
6897	        }
6898              else if (src_related_elt && elt == 0)
6899	        elt = src_related_elt;
6900	    }
6901        }
6902
6903      /* See if we have a CONST_INT that is already in a register in a
6904	 wider mode.  */
6905
6906      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6907	  && GET_MODE_CLASS (mode) == MODE_INT
6908	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6909	{
6910	  enum machine_mode wider_mode;
6911
6912	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
6913	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6914	       && src_related == 0;
6915	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6916	    {
6917	      struct table_elt *const_elt
6918		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6919
6920	      if (const_elt == 0)
6921		continue;
6922
6923	      for (const_elt = const_elt->first_same_value;
6924		   const_elt; const_elt = const_elt->next_same_value)
6925		if (GET_CODE (const_elt->exp) == REG)
6926		  {
6927		    src_related = gen_lowpart_if_possible (mode,
6928							   const_elt->exp);
6929		    break;
6930		  }
6931	    }
6932	}
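      /* A sketch of the case above (hypothetical register numbers): if
	 (reg:SI 100) is already known to hold (const_int 7) and this set
	 needs (const_int 7) in HImode, gen_lowpart_if_possible can supply
	 (subreg:HI (reg:SI 100) 0), avoiding a separate constant load.  */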
6933
6934      /* Another possibility is that we have an AND with a constant in
6935	 a mode narrower than a word.  If so, it might have been generated
6936	 as part of an "if" which would narrow the AND.  If we already
6937	 have done the AND in a wider mode, we can use a SUBREG of that
6938	 value.  */
6939
6940      if (flag_expensive_optimizations && ! src_related
6941	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6942	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6943	{
6944	  enum machine_mode tmode;
6945	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6946
6947	  for (tmode = GET_MODE_WIDER_MODE (mode);
6948	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6949	       tmode = GET_MODE_WIDER_MODE (tmode))
6950	    {
6951	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6952	      struct table_elt *larger_elt;
6953
6954	      if (inner)
6955		{
6956		  PUT_MODE (new_and, tmode);
6957		  XEXP (new_and, 0) = inner;
6958		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6959		  if (larger_elt == 0)
6960		    continue;
6961
6962		  for (larger_elt = larger_elt->first_same_value;
6963		       larger_elt; larger_elt = larger_elt->next_same_value)
6964		    if (GET_CODE (larger_elt->exp) == REG)
6965		      {
6966			src_related
6967			  = gen_lowpart_if_possible (mode, larger_elt->exp);
6968			break;
6969		      }
6970
6971		  if (src_related)
6972		    break;
6973		}
6974	    }
6975	}
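      /* Sketch (hypothetical RTL): if (reg:SI 100) is known to hold
	 (and:SI (reg:SI 101) (const_int 255)) and SRC is
	 (and:QI (subreg:QI (reg:SI 101) 0) (const_int 255)), the loop
	 above can offer (subreg:QI (reg:SI 100) 0) as SRC_RELATED.  */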
6976
6977#ifdef LOAD_EXTEND_OP
6978      /* See if a MEM has already been loaded with a widening operation;
6979	 if it has, we can use a subreg of that.  Many CISC machines
6980	 also have such operations, but this is only likely to be
6981	 beneficial on these machines.  */
6982
6983      if (flag_expensive_optimizations && src_related == 0
6984	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6985	  && GET_MODE_CLASS (mode) == MODE_INT
6986	  && GET_CODE (src) == MEM && ! do_not_record
6987	  && LOAD_EXTEND_OP (mode) != NIL)
6988	{
6989	  enum machine_mode tmode;
6990
6991	  /* Set what we are trying to extend and the operation it might
6992	     have been extended with.  */
6993	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6994	  XEXP (memory_extend_rtx, 0) = src;
6995
6996	  for (tmode = GET_MODE_WIDER_MODE (mode);
6997	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6998	       tmode = GET_MODE_WIDER_MODE (tmode))
6999	    {
7000	      struct table_elt *larger_elt;
7001
7002	      PUT_MODE (memory_extend_rtx, tmode);
7003	      larger_elt = lookup (memory_extend_rtx,
7004				   HASH (memory_extend_rtx, tmode), tmode);
7005	      if (larger_elt == 0)
7006		continue;
7007
7008	      for (larger_elt = larger_elt->first_same_value;
7009		   larger_elt; larger_elt = larger_elt->next_same_value)
7010		if (GET_CODE (larger_elt->exp) == REG)
7011		  {
7012		    src_related = gen_lowpart_if_possible (mode,
7013							   larger_elt->exp);
7014		    break;
7015		  }
7016
7017	      if (src_related)
7018		break;
7019	    }
7020	}
7021#endif /* LOAD_EXTEND_OP */
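      /* Sketch of the case above (hypothetical RTL, assuming a machine
	 whose QImode loads zero-extend): once

	     (set (reg:SI 100) (zero_extend:SI (mem:QI addr)))

	 is in the table, a later read of (mem:QI addr) can be replaced
	 by (subreg:QI (reg:SI 100) 0) instead of reloading memory.  */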
7022
7023      if (src == src_folded)
7024        src_folded = 0;
7025
7026      /* At this point, ELT, if non-zero, points to a class of expressions
7027         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
7028	 and SRC_RELATED, if non-zero, each contain additional equivalent
7029	 expressions.  Prune these latter expressions by deleting expressions
7030	 already in the equivalence class.
7031
7032	 Check for an equivalent identical to the destination.  If found,
7033	 this is the preferred equivalent since it will likely lead to
7034	 elimination of the insn.  Indicate this by placing it in
7035	 `src_related'.  */
7036
7037      if (elt) elt = elt->first_same_value;
7038      for (p = elt; p; p = p->next_same_value)
7039        {
7040	  enum rtx_code code = GET_CODE (p->exp);
7041
7042	  /* If the expression is not valid, ignore it.  Then we do not
7043	     have to check for validity below.  In most cases, we can use
7044	     `rtx_equal_p', since canonicalization has already been done.  */
7045	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
7046	    continue;
7047
7048	  /* Also skip paradoxical subregs, unless that's what we're
7049	     looking for.  */
7050	  if (code == SUBREG
7051	      && (GET_MODE_SIZE (GET_MODE (p->exp))
7052		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
7053	      && ! (src != 0
7054		    && GET_CODE (src) == SUBREG
7055		    && GET_MODE (src) == GET_MODE (p->exp)
7056		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7057			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7058	    continue;
7059
7060          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7061	    src = 0;
7062          else if (src_folded && GET_CODE (src_folded) == code
7063		   && rtx_equal_p (src_folded, p->exp))
7064	    src_folded = 0;
7065          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7066		   && rtx_equal_p (src_eqv_here, p->exp))
7067	    src_eqv_here = 0;
7068          else if (src_related && GET_CODE (src_related) == code
7069		   && rtx_equal_p (src_related, p->exp))
7070	    src_related = 0;
7071
7072	  /* If this is the same as the destination of the insn, we want
7073	     to prefer it.  Copy it to src_related.  The code below will
7074	     then give it a negative cost.  */
7075	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7076	    src_related = dest;
7077
7078        }
7079
7080      /* Find the cheapest valid equivalent, trying all the available
7081         possibilities.  Prefer items not in the hash table to ones
7082         that are when they are equal cost.  Note that we can never
7083         worsen an insn as the current contents will also succeed.
7084	 If we find an equivalent identical to the destination, use it as best,
7085	 since this insn will probably be eliminated in that case.  */
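      /* For example (hypothetical): if the class contains (reg:SI 100)
	 and DEST is (reg:SI 100), that equivalent is given cost -1
	 below, the insn becomes (set (reg:SI 100) (reg:SI 100)), and it
	 can then likely be deleted as a no-op.  */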
7086      if (src)
7087	{
7088	  if (rtx_equal_p (src, dest))
7089	    src_cost = -1;
7090	  else
7091	    src_cost = COST (src);
7092	}
7093
7094      if (src_eqv_here)
7095	{
7096	  if (rtx_equal_p (src_eqv_here, dest))
7097	    src_eqv_cost = -1;
7098	  else
7099	    src_eqv_cost = COST (src_eqv_here);
7100	}
7101
7102      if (src_folded)
7103	{
7104	  if (rtx_equal_p (src_folded, dest))
7105	    src_folded_cost = -1;
7106	  else
7107	    src_folded_cost = COST (src_folded);
7108	}
7109
7110      if (src_related)
7111	{
7112	  if (rtx_equal_p (src_related, dest))
7113	    src_related_cost = -1;
7114	  else
7115	    src_related_cost = COST (src_related);
7116	}
7117
7118      /* If this was an indirect jump insn, a known label will really be
7119	 cheaper even though it looks more expensive.  */
7120      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7121	src_folded = src_const, src_folded_cost = -1;
7122
7123      /* Terminate loop when replacement made.  This must terminate since
7124         the current contents will be tested and will always be valid.  */
7125      while (1)
7126        {
7127          rtx trial, old_src;
7128
7129          /* Skip invalid entries.  */
7130          while (elt && GET_CODE (elt->exp) != REG
7131	         && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7132	    elt = elt->next_same_value;
7133
7134	  /* A paradoxical subreg would be bad here: it'll be the right
7135	     size, but later may be adjusted so that the upper bits aren't
7136	     what we want.  So reject it.  */
7137	  if (elt != 0
7138	      && GET_CODE (elt->exp) == SUBREG
7139	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
7140		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7141	      /* It is okay, though, if the rtx we're trying to match
7142		 will ignore any of the bits we can't predict.  */
7143	      && ! (src != 0
7144		    && GET_CODE (src) == SUBREG
7145		    && GET_MODE (src) == GET_MODE (elt->exp)
7146		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7147			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7148	    {
7149	      elt = elt->next_same_value;
7150	      continue;
7151	    }
7152
7153          if (elt) src_elt_cost = elt->cost;
7154
7155          /* Find the cheapest and skip it for the next time.  For items
7156	     of equal cost, use this order:
7157	     src_folded, src, src_eqv, src_related and hash table entry.  */
7158          if (src_folded_cost <= src_cost
7159	      && src_folded_cost <= src_eqv_cost
7160	      && src_folded_cost <= src_related_cost
7161	      && src_folded_cost <= src_elt_cost)
7162	    {
7163	      trial = src_folded, src_folded_cost = 10000;
7164	      if (src_folded_force_flag)
7165		trial = force_const_mem (mode, trial);
7166	    }
7167          else if (src_cost <= src_eqv_cost
7168	           && src_cost <= src_related_cost
7169	           && src_cost <= src_elt_cost)
7170	    trial = src, src_cost = 10000;
7171          else if (src_eqv_cost <= src_related_cost
7172	           && src_eqv_cost <= src_elt_cost)
7173	    trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7174          else if (src_related_cost <= src_elt_cost)
7175	    trial = copy_rtx (src_related), src_related_cost = 10000;
7176          else
7177	    {
7178	      trial = copy_rtx (elt->exp);
7179	      elt = elt->next_same_value;
7180	      src_elt_cost = 10000;
7181	    }
7182
7183	  /* We don't normally have an insn matching (set (pc) (pc)), so
7184	     check for this separately here.  We will delete such an
7185	     insn below.
7186
7187	     Tablejump insns contain a USE of the table, so simply replacing
7188	     the operand with the constant won't match.  This is simply an
7189	     unconditional branch, however, and is therefore valid.  Just
7190	     insert the substitution here and we will delete and re-emit
7191	     the insn later.  */
7192
7193	  /* Keep track of the original SET_SRC so that we can fix notes
7194	     on libcall instructions.  */
7195 	  old_src = SET_SRC (sets[i].rtl);
7196
7197	  if (n_sets == 1 && dest == pc_rtx
7198	      && (trial == pc_rtx
7199		  || (GET_CODE (trial) == LABEL_REF
7200		      && ! condjump_p (insn))))
7201	    {
7202	      /* If TRIAL is a label in front of a jump table, we are
7203		 really falling through the switch (this is how casesi
7204		 insns work), so we must branch around the table.  */
7205	      if (GET_CODE (trial) == CODE_LABEL
7206		  && NEXT_INSN (trial) != 0
7207		  && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7208		  && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7209		      || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7210
7211		trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7212
7213	      SET_SRC (sets[i].rtl) = trial;
7214 	      cse_jumps_altered = 1;
7215	      break;
7216	    }
7217
7218	  /* Look for a substitution that makes a valid insn.  */
7219          else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7220	    {
7221	      /* If we just made a substitution inside a libcall, then we
7222		 need to make the same substitution in any notes attached
7223		 to the RETVAL insn.  */
7224	      if (libcall_insn
7225		  && (GET_CODE (old_src) == REG
7226		      || GET_CODE (old_src) == SUBREG
7227		      || GET_CODE (old_src) == MEM))
7228		replace_rtx (REG_NOTES (libcall_insn), old_src,
7229			     canon_reg (SET_SRC (sets[i].rtl), insn));
7230
7231	      /* The result of apply_change_group can be ignored; see
7232		 canon_reg.  */
7233
7234	      validate_change (insn, &SET_SRC (sets[i].rtl),
7235			       canon_reg (SET_SRC (sets[i].rtl), insn),
7236			       1);
7237	      apply_change_group ();
7238	      break;
7239	    }
7240
7241	  /* If we previously found constant pool entries for
7242	     constants and this is a constant, try making a
7243	     pool entry.  Put it in src_folded unless we already have done
7244	     this since that is where it likely came from.  */
7245
7246	  else if (constant_pool_entries_cost
7247		   && CONSTANT_P (trial)
7248		   && ! (GET_CODE (trial) == CONST
7249			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7250		   && (src_folded == 0
7251		       || (GET_CODE (src_folded) != MEM
7252			   && ! src_folded_force_flag))
7253		   && GET_MODE_CLASS (mode) != MODE_CC
7254		   && mode != VOIDmode)
7255	    {
7256	      src_folded_force_flag = 1;
7257	      src_folded = trial;
7258	      src_folded_cost = constant_pool_entries_cost;
7259	    }
7260        }
7261
7262      src = SET_SRC (sets[i].rtl);
7263
7264      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7265	 However, there is an important exception:  If both are registers
7266	 that are not the head of their equivalence class, replace SET_SRC
7267	 with the head of the class.  If we do not do this, we will have
7268	 both registers live over a portion of the basic block.  This way,
7269	 their lifetimes will likely abut instead of overlapping.  */
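      /* Concretely (hypothetical register numbers): if (reg 100) heads
	 the class containing (reg 101), then for (set (reg 101) (reg 101))
	 we substitute the head, giving (set (reg 101) (reg 100)), so the
	 two registers' lifetimes abut rather than overlap.  */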
7270      if (GET_CODE (dest) == REG
7271	  && REGNO_QTY_VALID_P (REGNO (dest))
7272	  && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7273	  && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7274	  && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7275	  /* Don't do this if the original insn had a hard reg as
7276	     SET_SRC.  */
7277	  && (GET_CODE (sets[i].src) != REG
7278	      || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7279	/* We can't call canon_reg here because it won't do anything if
7280	   SRC is a hard register.  */
7281	{
7282	  int first = qty_first_reg[REG_QTY (REGNO (src))];
7283	  rtx new_src
7284	    = (first >= FIRST_PSEUDO_REGISTER
7285	       ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7286
7287	  /* We must use validate-change even for this, because this
7288	     might be a special no-op instruction, suitable only to
7289	     tag notes onto.  */
7290	  if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7291	    {
7292	      src = new_src;
7293	      /* If we had a constant that is cheaper than what we are now
7294		 setting SRC to, use that constant.  We ignored it when we
7295		 thought we could make this into a no-op.  */
7296	      if (src_const && COST (src_const) < COST (src)
7297		  && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7298				      0))
7299		src = src_const;
7300	    }
7301	}
7302
7303      /* If we made a change, recompute SRC values.  */
7304      if (src != sets[i].src)
7305        {
7306          do_not_record = 0;
7307          hash_arg_in_memory = 0;
7308          hash_arg_in_struct = 0;
7309	  sets[i].src = src;
7310          sets[i].src_hash = HASH (src, mode);
7311          sets[i].src_volatile = do_not_record;
7312          sets[i].src_in_memory = hash_arg_in_memory;
7313          sets[i].src_in_struct = hash_arg_in_struct;
7314          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7315        }
7316
7317      /* If this is a single SET, we are setting a register, and we have an
7318	 equivalent constant, we want to add a REG_NOTE.   We don't want
7319	 to write a REG_EQUAL note for a constant pseudo since verifying that
7320	 that pseudo hasn't been eliminated is a pain.  Such a note also
7321	 won't help anything.
7322
7323	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7324	 which can be created for a reference to a compile time computable
7325	 entry in a jump table.  */
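      /* E.g. (hypothetical): for (set (reg:SI 100) (reg:SI 101)) where
	 (reg:SI 101) is known equal to (const_int 42), we attach
	 (expr_list:REG_EQUAL (const_int 42) ...) to the insn so later
	 passes can see the constant value.  */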
7326
7327      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7328	  && GET_CODE (src_const) != REG
7329	  && ! (GET_CODE (src_const) == CONST
7330		&& GET_CODE (XEXP (src_const, 0)) == MINUS
7331		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7332		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7333	{
7334	  tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7335
7336	  /* Make sure that the rtx is not shared with any other insn.  */
7337	  src_const = copy_rtx (src_const);
7338
7339	  /* Record the actual constant value in a REG_EQUAL note, making
7340	     a new one if one does not already exist.  */
7341	  if (tem)
7342	    XEXP (tem, 0) = src_const;
7343	  else
7344	    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7345						  src_const, REG_NOTES (insn));
7346
7347          /* If storing a constant value in a register that
7348	     previously held the constant value 0,
7349	     record this fact with a REG_WAS_0 note on this insn.
7350
7351	     Note that the *register* is required to have previously held 0,
7352	     not just any register in the quantity, and we must point to the
7353	     insn that set that register to zero.
7354
7355	     Rather than track each register individually, we just see if
7356	     the last set for this quantity was for this register.  */
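          /* Sketch (hypothetical insns):

	         I1: (set (reg:SI 100) (const_int 0))
	         I2: (set (reg:SI 100) (const_int 9))

	     I2 gets an (insn_list:REG_WAS_0 I1) note, provided I1 is
	     still the last set of this quantity and set this very
	     register.  */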
7357
7358	  if (REGNO_QTY_VALID_P (REGNO (dest))
7359	      && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7360	    {
7361	      /* See if we previously had a REG_WAS_0 note.  */
7362	      rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7363	      rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7364
7365	      if ((tem = single_set (const_insn)) != 0
7366		  && rtx_equal_p (SET_DEST (tem), dest))
7367		{
7368		  if (note)
7369		    XEXP (note, 0) = const_insn;
7370		  else
7371		    REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7372							  const_insn,
7373							  REG_NOTES (insn));
7374		}
7375	    }
7376	}
7377
7378      /* Now deal with the destination.  */
7379      do_not_record = 0;
7380      sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
7381
7382      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7383	 to the MEM or REG within it.  */
7384      while (GET_CODE (dest) == SIGN_EXTRACT
7385	     || GET_CODE (dest) == ZERO_EXTRACT
7386	     || GET_CODE (dest) == SUBREG
7387	     || GET_CODE (dest) == STRICT_LOW_PART)
7388	{
7389	  sets[i].inner_dest_loc = &XEXP (dest, 0);
7390	  dest = XEXP (dest, 0);
7391	}
7392
7393      sets[i].inner_dest = dest;
7394
7395      if (GET_CODE (dest) == MEM)
7396	{
7397#ifdef PUSH_ROUNDING
7398	  /* Stack pushes invalidate the stack pointer.  */
7399	  rtx addr = XEXP (dest, 0);
7400	  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7401	       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7402	      && XEXP (addr, 0) == stack_pointer_rtx)
7403	    invalidate (stack_pointer_rtx, Pmode);
7404#endif
7405	  dest = fold_rtx (dest, insn);
7406	}
7407
7408      /* Compute the hash code of the destination now,
7409	 before the effects of this instruction are recorded,
7410	 since the register values used in the address computation
7411	 are those before this instruction.  */
7412      sets[i].dest_hash = HASH (dest, mode);
7413
7414      /* Don't enter a bit-field in the hash table
7415	 because the value in it after the store
7416	 may not equal what was stored, due to truncation.  */
7417
7418      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7419	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7420	{
7421	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7422
7423	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7424	      && GET_CODE (width) == CONST_INT
7425	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7426	      && ! (INTVAL (src_const)
7427		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7428	    /* Exception: if the value is constant,
7429	       and it won't be truncated, record it.  */
7430	    ;
7431	  else
7432	    {
7433	      /* This is chosen so that the destination will be invalidated
7434		 but no new value will be recorded.
7435		 We must invalidate because sometimes constant
7436		 values can be recorded for bitfields.  */
7437	      sets[i].src_elt = 0;
7438	      sets[i].src_volatile = 1;
7439	      src_eqv = 0;
7440	      src_eqv_elt = 0;
7441	    }
7442	}
7443
7444      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7445	 the insn.  */
7446      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7447	{
7448	  PUT_CODE (insn, NOTE);
7449	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7450	  NOTE_SOURCE_FILE (insn) = 0;
7451	  cse_jumps_altered = 1;
7452	  /* One less use of the label this insn used to jump to.  */
7453	  if (JUMP_LABEL (insn) != 0)
7454	    --LABEL_NUSES (JUMP_LABEL (insn));
7455	  /* No more processing for this set.  */
7456	  sets[i].rtl = 0;
7457	}
7458
7459      /* If this SET is now setting PC to a label, we know it used to
7460	 be a conditional or computed branch.  So we see if we can follow
7461	 it.  If it was a computed branch, delete it and re-emit.  */
7462      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7463	{
7464	  rtx p;
7465
7466	  /* If this is not in the format for a simple branch and
7467	     this is the only SET in the insn, re-emit it.
7468	  if (! simplejump_p (insn) && n_sets == 1)
7469	    {
7470	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7471	      JUMP_LABEL (new) = XEXP (src, 0);
7472	      LABEL_NUSES (XEXP (src, 0))++;
7473	      insn = new;
7474	    }
7475	  else
7476	    /* Otherwise, force rerecognition, since it probably had
7477	       a different pattern before.
7478	       This shouldn't really be necessary, since whatever
7479	       changed the source value above should have done this.
7480	       Until the right place is found, might as well do this here.  */
7481	    INSN_CODE (insn) = -1;
7482
7483	  /* Now emit a BARRIER after the unconditional jump.  Do not bother
7484	     deleting any unreachable code, let jump/flow do that.  */
7485	  if (NEXT_INSN (insn) != 0
7486	      && GET_CODE (NEXT_INSN (insn)) != BARRIER)
7487	    emit_barrier_after (insn);
7488
7489	  cse_jumps_altered = 1;
7490	  sets[i].rtl = 0;
7491	}
7492
7493      /* If destination is volatile, invalidate it and then do no further
7494	 processing for this assignment.  */
7495
7496      else if (do_not_record)
7497	{
7498	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7499	      || GET_CODE (dest) == MEM)
7500	    invalidate (dest, VOIDmode);
7501	  else if (GET_CODE (dest) == STRICT_LOW_PART
7502		   || GET_CODE (dest) == ZERO_EXTRACT)
7503	    invalidate (XEXP (dest, 0), GET_MODE (dest));
7504	  sets[i].rtl = 0;
7505	}
7506
7507      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7508	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7509
7510#ifdef HAVE_cc0
7511      /* If setting CC0, record what it was set to, or a constant, if it
7512	 is equivalent to a constant.  If it is being set to a floating-point
7513	 value, make a COMPARE with the appropriate constant of 0.  If we
7514	 don't do this, later code can interpret this as a test against
7515	 const0_rtx, which can cause problems if we try to put it into an
7516	 insn as a floating-point operand.  */
7517      if (dest == cc0_rtx)
7518	{
7519	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7520	  this_insn_cc0_mode = mode;
7521	  if (FLOAT_MODE_P (mode))
7522	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7523					     CONST0_RTX (mode));
7524	}
7525#endif
7526    }
7527
7528  /* Now enter all non-volatile source expressions in the hash table
7529     if they are not already present.
7530     Record their equivalence classes in src_elt.
7531     This way we can insert the corresponding destinations into
7532     the same classes even if the actual sources are no longer in them
7533     (having been invalidated).  */
7534
7535  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7536      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7537    {
7538      register struct table_elt *elt;
7539      register struct table_elt *classp = sets[0].src_elt;
7540      rtx dest = SET_DEST (sets[0].rtl);
7541      enum machine_mode eqvmode = GET_MODE (dest);
7542
7543      if (GET_CODE (dest) == STRICT_LOW_PART)
7544	{
7545	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7546	  classp = 0;
7547	}
7548      if (insert_regs (src_eqv, classp, 0))
7549	{
7550	  rehash_using_reg (src_eqv);
7551	  src_eqv_hash = HASH (src_eqv, eqvmode);
7552	}
7553      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7554      elt->in_memory = src_eqv_in_memory;
7555      elt->in_struct = src_eqv_in_struct;
7556      src_eqv_elt = elt;
7557
7558      /* Check to see if src_eqv_elt is the same as a set source which
7559	 does not yet have an elt, and if so set the elt of the set source
7560	 to src_eqv_elt.  */
7561      for (i = 0; i < n_sets; i++)
7562	if (sets[i].rtl && sets[i].src_elt == 0
7563	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7564	  sets[i].src_elt = src_eqv_elt;
7565    }
7566
7567  for (i = 0; i < n_sets; i++)
7568    if (sets[i].rtl && ! sets[i].src_volatile
7569	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7570      {
7571	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7572	  {
7573	    /* REG_EQUAL in setting a STRICT_LOW_PART
7574	       gives an equivalent for the entire destination register,
7575	       not just for the subreg being stored in now.
7576	       This is a more interesting equivalence, so we arrange later
7577	       to treat the entire reg as the destination.  */
7578	    sets[i].src_elt = src_eqv_elt;
7579	    sets[i].src_hash = src_eqv_hash;
7580	  }
7581	else
7582	  {
7583	    /* Insert source and constant equivalent into hash table, if not
7584	       already present.  */
7585	    register struct table_elt *classp = src_eqv_elt;
7586	    register rtx src = sets[i].src;
7587	    register rtx dest = SET_DEST (sets[i].rtl);
7588	    enum machine_mode mode
7589	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7590
7591	    /* Don't put a hard register source into the table if this is
7592	       the last insn of a libcall.  */
7593	    if (sets[i].src_elt == 0
7594		&& (GET_CODE (src) != REG
7595		    || REGNO (src) >= FIRST_PSEUDO_REGISTER
7596		    || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7597	      {
7598		register struct table_elt *elt;
7599
7600		/* Note that these insert_regs calls cannot remove
7601		   any of the src_elt's, because they would have failed to
7602		   match if not still valid.  */
7603		if (insert_regs (src, classp, 0))
7604		  {
7605		    rehash_using_reg (src);
7606		    sets[i].src_hash = HASH (src, mode);
7607		  }
7608		elt = insert (src, classp, sets[i].src_hash, mode);
7609		elt->in_memory = sets[i].src_in_memory;
7610		elt->in_struct = sets[i].src_in_struct;
7611		sets[i].src_elt = classp = elt;
7612	      }
7613
7614	    if (sets[i].src_const && sets[i].src_const_elt == 0
7615		&& src != sets[i].src_const
7616		&& ! rtx_equal_p (sets[i].src_const, src))
7617	      sets[i].src_elt = insert (sets[i].src_const, classp,
7618					sets[i].src_const_hash, mode);
7619	  }
7620      }
7621    else if (sets[i].src_elt == 0)
7622      /* If we did not insert the source into the hash table (e.g., it was
7623	 volatile), note the equivalence class for the REG_EQUAL value, if any,
7624	 so that the destination goes into that class.  */
7625      sets[i].src_elt = src_eqv_elt;
7626
7627  invalidate_from_clobbers (x);
7628
7629  /* Some registers are invalidated by subroutine calls.  Memory is
7630     invalidated by non-constant calls.  */
7631
7632  if (GET_CODE (insn) == CALL_INSN)
7633    {
7634      if (! CONST_CALL_P (insn))
7635	invalidate_memory ();
7636      invalidate_for_call ();
7637    }
7638
7639  /* Now invalidate everything set by this instruction.
7640     If a SUBREG or other funny destination is being set,
7641     sets[i].rtl is still nonzero, so here we invalidate the reg
7642     a part of which is being set.  */
7643
7644  for (i = 0; i < n_sets; i++)
7645    if (sets[i].rtl)
7646      {
7647	/* We can't use the inner dest, because the mode associated with
7648	   a ZERO_EXTRACT is significant.  */
7649	register rtx dest = SET_DEST (sets[i].rtl);
7650
7651	/* Needed for registers to remove the register from its
7652	   previous quantity's chain.
7653	   Needed for memory if this is a nonvarying address, unless
7654	   we have just done an invalidate_memory that covers even those.  */
7655	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7656	    || GET_CODE (dest) == MEM)
7657	  invalidate (dest, VOIDmode);
7658	else if (GET_CODE (dest) == STRICT_LOW_PART
7659		 || GET_CODE (dest) == ZERO_EXTRACT)
7660	  invalidate (XEXP (dest, 0), GET_MODE (dest));
7661      }
7662
7663  /* A volatile ASM invalidates everything.  */
7664  if (GET_CODE (insn) == INSN
7665      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7666      && MEM_VOLATILE_P (PATTERN (insn)))
7667    flush_hash_table ();
7668
7669  /* Make sure registers mentioned in destinations
7670     are safe for use in an expression to be inserted.
7671     This removes from the hash table
7672     any invalid entry that refers to one of these registers.
7673
7674     We don't care about the return value from mention_regs because
7675     we are going to hash the SET_DEST values unconditionally.  */
7676
7677  for (i = 0; i < n_sets; i++)
7678    {
7679      if (sets[i].rtl)
7680	{
7681	  rtx x = SET_DEST (sets[i].rtl);
7682
7683	  if (GET_CODE (x) != REG)
7684	    mention_regs (x);
7685	  else
7686	    {
7687	      /* We used to rely on all references to a register becoming
7688		 inaccessible when a register changes to a new quantity,
7689		 since that changes the hash code.  However, that is not
7690		 safe, since after NBUCKETS new quantities we get a
7691		 hash 'collision' of a register with its own invalid
7692		 entries.  And since SUBREGs have been changed not to
7693		 change their hash code with the hash code of the register,
7694		 it wouldn't work any longer at all.  So we have to check
7695		 for any invalid references lying around now.
7696		 This code is similar to the REG case in mention_regs,
7697		 but it knows that reg_tick has been incremented, and
7698		 it leaves reg_in_table as -1 .  */
7699	      register int regno = REGNO (x);
7700	      register int endregno
7701		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7702			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7703	      int i;
7704
7705	      for (i = regno; i < endregno; i++)
7706		{
7707		  if (REG_IN_TABLE (i) >= 0)
7708		    {
7709		      remove_invalid_refs (i);
7710		      REG_IN_TABLE (i) = -1;
7711		    }
7712		}
7713	    }
7714	}
7715    }
7716
7717  /* We may have just removed some of the src_elt's from the hash table.
7718     So replace each one with the current head of the same class.  */
7719
7720  for (i = 0; i < n_sets; i++)
7721    if (sets[i].rtl)
7722      {
7723	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7724	  /* If elt was removed, find current head of same class,
7725	     or 0 if nothing remains of that class.  */
7726	  {
7727	    register struct table_elt *elt = sets[i].src_elt;
7728
7729	    while (elt && elt->prev_same_value)
7730	      elt = elt->prev_same_value;
7731
7732	    while (elt && elt->first_same_value == 0)
7733	      elt = elt->next_same_value;
7734	    sets[i].src_elt = elt ? elt->first_same_value : 0;
7735	  }
7736      }
7737
7738  /* Now insert the destinations into their equivalence classes.  */
7739
7740  for (i = 0; i < n_sets; i++)
7741    if (sets[i].rtl)
7742      {
7743	register rtx dest = SET_DEST (sets[i].rtl);
7744	rtx inner_dest = sets[i].inner_dest;
7745	register struct table_elt *elt;
7746
7747	/* Don't record value if we are not supposed to risk allocating
7748	   floating-point values in registers that might be wider than
7749	   memory.  */
7750	if ((flag_float_store
7751	     && GET_CODE (dest) == MEM
7752	     && FLOAT_MODE_P (GET_MODE (dest)))
7753	    /* Don't record BLKmode values, because we don't know the
7754	       size of it, and can't be sure that other BLKmode values
7755	       have the same or smaller size.  */
7756	    || GET_MODE (dest) == BLKmode
7757	    /* Don't record values of destinations set inside a libcall block
7758	       since we might delete the libcall.  Things should have been set
7759	       up so we won't want to reuse such a value, but we play it safe
7760	       here.  */
7761	    || libcall_insn
7762	    /* If we didn't put a REG_EQUAL value or a source into the hash
7763	       table, there is no point in recording DEST.  */
7764	    || sets[i].src_elt == 0
7765	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7766	       or SIGN_EXTEND, don't record DEST since it can cause
7767	       some tracking to be wrong.
7768
7769	       ??? Think about this more later.  */
7770	    || (GET_CODE (dest) == SUBREG
7771		&& (GET_MODE_SIZE (GET_MODE (dest))
7772		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7773		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
7774		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7775	  continue;
7776
7777	/* STRICT_LOW_PART isn't part of the value BEING set,
7778	   and neither is the SUBREG inside it.
7779	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
7780	if (GET_CODE (dest) == STRICT_LOW_PART)
7781	  dest = SUBREG_REG (XEXP (dest, 0));
7782
7783	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7784	  /* Registers must also be inserted into chains for quantities.  */
7785	  if (insert_regs (dest, sets[i].src_elt, 1))
7786	    {
7787	      /* If `insert_regs' changes something, the hash code must be
7788		 recalculated.  */
7789	      rehash_using_reg (dest);
7790	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7791	    }
7792
7793	if (GET_CODE (inner_dest) == MEM
7794	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7795	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7796	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
7797	     Consider the case in which the address of the MEM is
7798	     passed to a function, which alters the MEM.  Then, if we
7799	     later use Y instead of the MEM we'll miss the update.  */
7800	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7801	else
7802	  elt = insert (dest, sets[i].src_elt,
7803			sets[i].dest_hash, GET_MODE (dest));
7804
7805	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7806			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7807			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7808							  0))));
7809
7810	if (elt->in_memory)
7811	  {
7812	    /* This implicitly assumes a whole struct
7813	       need not have MEM_IN_STRUCT_P.
7814	       But a whole struct is *supposed* to have MEM_IN_STRUCT_P.  */
7815	    elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7816			      || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7817	  }
7818
7819	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7820	   narrower than M2, and both M1 and M2 are the same number of words,
7821	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7822	   make that equivalence as well.
7823
7824	   However, BAR may have equivalences for which gen_lowpart_if_possible
7825	   will produce a simpler value than gen_lowpart_if_possible applied to
7826	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7827	   BAR's equivalences.  If we don't get a simplified form, make
7828	   the SUBREG.  It will not be used in an equivalence, but will
7829	   cause two similar assignments to be detected.
7830
7831	   Note the loop below will find SUBREG_REG (DEST) since we have
7832	   already entered SRC and DEST of the SET in the table.  */
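	/* For instance (hypothetical, with single-word SImode and HImode):
	   given (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)), the loop
	   below also records (reg:HI 100) as equivalent to
	   (subreg:HI (reg:SI 101) 0), or to a simpler lowpart found among
	   (reg:SI 101)'s equivalences.  */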
7833
7834	if (GET_CODE (dest) == SUBREG
7835	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7836		 / UNITS_PER_WORD)
7837		== (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
7838	    && (GET_MODE_SIZE (GET_MODE (dest))
7839		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7840	    && sets[i].src_elt != 0)
7841	  {
7842	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7843	    struct table_elt *elt, *classp = 0;
7844
7845	    for (elt = sets[i].src_elt->first_same_value; elt;
7846		 elt = elt->next_same_value)
7847	      {
7848		rtx new_src = 0;
7849		unsigned src_hash;
7850		struct table_elt *src_elt;
7851
7852		/* Ignore invalid entries.  */
7853		if (GET_CODE (elt->exp) != REG
7854		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7855		  continue;
7856
7857		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7858		if (new_src == 0)
7859		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7860
7861		src_hash = HASH (new_src, new_mode);
7862		src_elt = lookup (new_src, src_hash, new_mode);
7863
7864		/* Put the new source in the hash table if it isn't
7865		   there already.  */
7866		if (src_elt == 0)
7867		  {
7868		    if (insert_regs (new_src, classp, 0))
7869		      {
7870			rehash_using_reg (new_src);
7871			src_hash = HASH (new_src, new_mode);
7872		      }
7873		    src_elt = insert (new_src, classp, src_hash, new_mode);
7874		    src_elt->in_memory = elt->in_memory;
7875		    src_elt->in_struct = elt->in_struct;
7876		  }
7877		else if (classp && classp != src_elt->first_same_value)
7878		  /* Show that two things that we've seen before are
7879		     actually the same.  */
7880		  merge_equiv_classes (src_elt, classp);
7881
7882		classp = src_elt->first_same_value;
7883		/* Ignore invalid entries.  */
7884		while (classp
7885		       && GET_CODE (classp->exp) != REG
7886		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7887		  classp = classp->next_same_value;
7888	      }
7889	  }
7890      }
7891
7892  /* Special handling for (set REG0 REG1)
7893     where REG0 is the "cheapest", cheaper than REG1.
7894     After cse, REG1 will probably not be used in the sequel,
7895     so (if easily done) change this insn to (set REG1 REG0) and
7896     replace REG1 with REG0 in the previous insn that computed their value.
7897     Then REG1 will become a dead store and won't cloud the situation
7898     for later optimizations.
7899
7900     Do not make this change if REG1 is a hard register, because it will
7901     then be used in the sequel and we may be changing a two-operand insn
7902     into a three-operand insn.
7903
7904     Also do not do this if we are operating on a copy of INSN.
7905
7906     Also don't do this if INSN ends a libcall; this would cause an unrelated
7907     register to be set in the middle of a libcall, and we then get bad code
7908     if the libcall is deleted.  */
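  /* Sketch of the transformation (hypothetical pseudos; REG0 is the
     cheaper (reg 100), REG1 is (reg 101)):

	 (set (reg 101) (plus (reg 102) (reg 103)))	; PREV
	 (set (reg 100) (reg 101))			; INSN

     becomes

	 (set (reg 100) (plus (reg 102) (reg 103)))
	 (set (reg 101) (reg 100))

     leaving the second insn as a likely dead store.  */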
7909
7910  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7911      && NEXT_INSN (PREV_INSN (insn)) == insn
7912      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7913      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7914      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7915      && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
7916	  == REGNO (SET_DEST (sets[0].rtl)))
7917      && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7918    {
7919      rtx prev = PREV_INSN (insn);
7920      while (prev && GET_CODE (prev) == NOTE)
7921	prev = PREV_INSN (prev);
7922
7923      if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7924	  && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7925	{
7926	  rtx dest = SET_DEST (sets[0].rtl);
7927	  rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7928
7929	  validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7930	  validate_change (insn, & SET_DEST (sets[0].rtl),
7931			   SET_SRC (sets[0].rtl), 1);
7932	  validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7933	  apply_change_group ();
7934
7935	  /* If REG1 was equivalent to a constant, REG0 is not.  */
7936	  if (note)
7937	    PUT_REG_NOTE_KIND (note, REG_EQUAL);
7938
7939	  /* If there was a REG_WAS_0 note on PREV, remove it.  Move
7940	     any REG_WAS_0 note on INSN to PREV.  */
7941	  note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7942	  if (note)
7943	    remove_note (prev, note);
7944
7945	  note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7946	  if (note)
7947	    {
7948	      remove_note (insn, note);
7949	      XEXP (note, 1) = REG_NOTES (prev);
7950	      REG_NOTES (prev) = note;
7951	    }
7952
7953	  /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7954	     then we must delete it, because the value in REG0 has changed.  */
7955	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7956	  if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7957	    remove_note (insn, note);
7958	}
7959    }
7960
7961  /* If this is a conditional jump insn, record any known equivalences due to
7962     the condition being tested.  */
7963
7964  last_jump_equiv_class = 0;
7965  if (GET_CODE (insn) == JUMP_INSN
7966      && n_sets == 1 && GET_CODE (x) == SET
7967      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7968    record_jump_equiv (insn, 0);
7969
7970#ifdef HAVE_cc0
7971  /* If the previous insn set CC0 and this insn no longer references CC0,
7972     delete the previous insn.  Here we use the fact that nothing expects CC0
7973     to be valid over an insn, which is true until the final pass.  */
7974  if (prev_insn && GET_CODE (prev_insn) == INSN
7975      && (tem = single_set (prev_insn)) != 0
7976      && SET_DEST (tem) == cc0_rtx
7977      && ! reg_mentioned_p (cc0_rtx, x))
7978    {
7979      PUT_CODE (prev_insn, NOTE);
7980      NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7981      NOTE_SOURCE_FILE (prev_insn) = 0;
7982    }
7983
7984  prev_insn_cc0 = this_insn_cc0;
7985  prev_insn_cc0_mode = this_insn_cc0_mode;
7986#endif
7987
7988  prev_insn = insn;
7989}
7990
7991/* Remove from the hash table all expressions that reference memory.  */
7992static void
7993invalidate_memory ()
7994{
7995  register int i;
7996  register struct table_elt *p, *next;
7997
7998  for (i = 0; i < NBUCKETS; i++)
7999    for (p = table[i]; p; p = next)
8000      {
8001	next = p->next_same_hash;
8002	if (p->in_memory)
8003	  remove_from_table (p, i);
8004      }
8005}
8006
8007/* XXX ??? The name of this function bears little resemblance to
8008   what this function actually does.  FIXME.  */
8009static int
8010note_mem_written (addr)
8011     register rtx addr;
8012{
8013  /* Pushing or popping the stack invalidates just the stack pointer.  */
8014  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
8015       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
8016      && GET_CODE (XEXP (addr, 0)) == REG
8017      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
8018    {
8019      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
8020	REG_TICK (STACK_POINTER_REGNUM)++;
8021
8022      /* This should be *very* rare.  */
8023      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
8024	invalidate (stack_pointer_rtx, VOIDmode);
8025      return 1;
8026    }
8027  return 0;
8028}
8029
8030/* Perform invalidation on the basis of everything about an insn
8031   except for invalidating the actual places that are SET in it.
8032   This includes the places CLOBBERed, and anything that might
8033   alias with something that is SET or CLOBBERed.
8034
8035   X is the pattern of the insn.  */
8036
8037static void
8038invalidate_from_clobbers (x)
8039     rtx x;
8040{
8041  if (GET_CODE (x) == CLOBBER)
8042    {
8043      rtx ref = XEXP (x, 0);
8044      if (ref)
8045	{
8046	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8047	      || GET_CODE (ref) == MEM)
8048	    invalidate (ref, VOIDmode);
8049	  else if (GET_CODE (ref) == STRICT_LOW_PART
8050		   || GET_CODE (ref) == ZERO_EXTRACT)
8051	    invalidate (XEXP (ref, 0), GET_MODE (ref));
8052	}
8053    }
8054  else if (GET_CODE (x) == PARALLEL)
8055    {
8056      register int i;
8057      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8058	{
8059	  register rtx y = XVECEXP (x, 0, i);
8060	  if (GET_CODE (y) == CLOBBER)
8061	    {
8062	      rtx ref = XEXP (y, 0);
8063	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8064		  || GET_CODE (ref) == MEM)
8065		invalidate (ref, VOIDmode);
8066	      else if (GET_CODE (ref) == STRICT_LOW_PART
8067		       || GET_CODE (ref) == ZERO_EXTRACT)
8068		invalidate (XEXP (ref, 0), GET_MODE (ref));
8069	    }
8070	}
8071    }
8072}
8073
8074/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
8075   and replace any registers in them with either an equivalent constant
8076   or the canonical form of the register.  If we are inside an address,
8077   only do this if the address remains valid.
8078
8079   OBJECT is 0 except when within a MEM in which case it is the MEM.
8080
8081   Return the replacement for X.  */
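/* For example (hypothetical): a note (expr_list:REG_EQUAL
   (plus:SI (reg 100) (const_int 4)) ...) where (reg 100) is currently
   equivalent to (reg 99) is rewritten to use (reg 99); had (reg 100)
   been equivalent to a constant, the constant would have been
   substituted instead.  */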
8082
8083static rtx
8084cse_process_notes (x, object)
8085     rtx x;
8086     rtx object;
8087{
8088  enum rtx_code code = GET_CODE (x);
8089  char *fmt = GET_RTX_FORMAT (code);
8090  int i;
8091
8092  switch (code)
8093    {
8094    case CONST_INT:
8095    case CONST:
8096    case SYMBOL_REF:
8097    case LABEL_REF:
8098    case CONST_DOUBLE:
8099    case PC:
8100    case CC0:
8101    case LO_SUM:
8102      return x;
8103
8104    case MEM:
8105      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8106      return x;
8107
8108    case EXPR_LIST:
8109    case INSN_LIST:
8110      if (REG_NOTE_KIND (x) == REG_EQUAL)
8111	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
8112      if (XEXP (x, 1))
8113	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
8114      return x;
8115
8116    case SIGN_EXTEND:
8117    case ZERO_EXTEND:
8118    case SUBREG:
8119      {
8120	rtx new = cse_process_notes (XEXP (x, 0), object);
8121	/* We don't substitute VOIDmode constants into these rtx,
8122	   since they would impede folding.  */
8123	if (GET_MODE (new) != VOIDmode)
8124	  validate_change (object, &XEXP (x, 0), new, 0);
8125	return x;
8126      }
8127
8128    case REG:
8129      i = REG_QTY (REGNO (x));
8130
8131      /* Return a constant or a constant register.  */
8132      if (REGNO_QTY_VALID_P (REGNO (x))
8133	  && qty_const[i] != 0
8134	  && (CONSTANT_P (qty_const[i])
8135	      || GET_CODE (qty_const[i]) == REG))
8136	{
8137	  rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8138	  if (new)
8139	    return new;
8140	}
8141
8142      /* Otherwise, canonicalize this register.  */
8143      return canon_reg (x, NULL_RTX);
8144
8145    default:
8146      break;
8147    }
8148
8149  for (i = 0; i < GET_RTX_LENGTH (code); i++)
8150    if (fmt[i] == 'e')
8151      validate_change (object, &XEXP (x, i),
8152		       cse_process_notes (XEXP (x, i), object), 0);
8153
8154  return x;
8155}
8156
8157/* Find common subexpressions between the end test of a loop and the beginning
8158   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
8159
8160   Often we have a loop where an expression in the exit test is used
8161   in the body of the loop.  For example "while (*p) *q++ = *p++;".
8162   Because of the way we duplicate the loop exit test in front of the loop,
8163   however, we don't detect that common subexpression.  This will be caught
8164   when global cse is implemented, but this is a quite common case.
8165
8166   This function handles the most common cases of these common expressions.
8167   It is called after we have processed the basic block ending with the
8168   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8169   jumps to a label used only once.  */
8170
static void
cse_around_loop (loop_start)
     rtx loop_start;
{
  rtx insn;
  int i;
  struct table_elt *p;

  /* If the jump at the end of the loop doesn't go to the start, we don't
     do anything.  */
  for (insn = PREV_INSN (loop_start);
       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
       insn = PREV_INSN (insn))
    ;

  if (insn == 0
      || GET_CODE (insn) != NOTE
      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
    return;

  /* If the last insn of the loop (the end test) was an NE comparison,
     we will interpret it as an EQ comparison, since we fell through
     the loop.  Any equivalences resulting from that comparison are
     therefore not valid and must be invalidated.  */
  if (last_jump_equiv_class)
    for (p = last_jump_equiv_class->first_same_value; p;
	 p = p->next_same_value)
      {
	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
	    || (GET_CODE (p->exp) == SUBREG
		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
	  invalidate (p->exp, VOIDmode);
	else if (GET_CODE (p->exp) == STRICT_LOW_PART
		 || GET_CODE (p->exp) == ZERO_EXTRACT)
	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
      }

  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).

     The only thing we do with SET_DEST is invalidate entries, so we
     can safely process each SET in order.  It is slightly less efficient
     to do so, but we only want to handle the most common cases.

     The gen_move_insn call in cse_set_around_loop may create new pseudos.
     These pseudos won't have valid entries in any of the tables indexed
     by register number, such as reg_qty.  We avoid out-of-range array
     accesses by not processing any instructions created after cse started.  */

  for (insn = NEXT_INSN (loop_start);
       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
       && INSN_UID (insn) < max_insn_uid
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	  && (GET_CODE (PATTERN (insn)) == SET
	      || GET_CODE (PATTERN (insn)) == CLOBBER))
	cse_set_around_loop (PATTERN (insn), insn, loop_start);
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	       && GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
				 loop_start);
    }
}

/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
   since they are done elsewhere.  This function is called via note_stores.  */

static void
invalidate_skipped_set (dest, set)
     rtx dest;
     rtx set;
{
  enum rtx_code code = GET_CODE (dest);

  if (code == MEM
      && ! note_mem_written (dest)	/* If this is not a stack push ...  */
      /* There are times when an address can appear varying and be a PLUS
	 during this scan when it would be a fixed address were we to know
	 the proper equivalences.  So invalidate all memory if there is
	 a BLKmode or nonscalar memory reference or a reference to a
	 variable address.  */
      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
	  || cse_rtx_varies_p (XEXP (dest, 0))))
    {
      invalidate_memory ();
      return;
    }

  if (GET_CODE (set) == CLOBBER
#ifdef HAVE_cc0
      || dest == cc0_rtx
#endif
      || dest == pc_rtx)
    return;

  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
    invalidate (XEXP (dest, 0), GET_MODE (dest));
  else if (code == REG || code == SUBREG || code == MEM)
    invalidate (dest, VOIDmode);
}

/* Invalidate all insns from START up to the end of the function or the
   next label.  This is called when we wish to CSE around a block that is
   conditionally executed.  */

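/* For instance (a source-level sketch), when the branch around the block
   in

     if (cond)
       {
	 *p = 0;
	 x = 5;
       }
     y = x + 1;

   is followed, the skipped stores to *p and x may or may not execute, so
   any equivalence involving them must be dropped before we resume the
   scan at "y = x + 1".  */
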
static void
invalidate_skipped_block (start)
     rtx start;
{
  rtx insn;

  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
       insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;

      if (GET_CODE (insn) == CALL_INSN)
	{
	  if (! CONST_CALL_P (insn))
	    invalidate_memory ();
	  invalidate_for_call ();
	}

      invalidate_from_clobbers (PATTERN (insn));
      note_stores (PATTERN (insn), invalidate_skipped_set);
    }
}

/* Used for communication between the following two routines; contains a
   value to be checked for modification.  */

static rtx cse_check_loop_start_value;

/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
   indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0.  */

static void
cse_check_loop_start (x, set)
     rtx x;
     rtx set ATTRIBUTE_UNUSED;
{
  if (cse_check_loop_start_value == 0
      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
    return;

  if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
      || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
    cse_check_loop_start_value = 0;
}

/* X is a SET or CLOBBER contained in INSN that was found near the start of
   a loop that starts with the label at LOOP_START.

   If X is a SET, we see if its SET_SRC is currently in our hash table.
   If so, we see if it has a value equal to some register used only in the
   loop exit code (as marked by jump.c).

   If those two conditions are true, we search backwards from the start of
   the loop to see if that same value was loaded into a register that still
   retains its value at the start of the loop.

   If so, we insert an insn after the load to copy the destination of that
   load into the equivalent register and (try to) replace our SET_SRC with that
   register.

   In any event, we invalidate whatever this SET or CLOBBER modifies.  */

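/* A rough sketch of the transformation, with made-up register numbers:

     p:   (set (reg 80) (mem (reg 65)))	-- just before the loop
   top:
     x:   (set (reg 70) (mem (reg 65)))	-- near the start of the loop

   where the hash table says (mem (reg 65)) is also the value of reg 90,
   a register used only in the loop exit test.  If nothing between P and
   TOP clobbers that value, we emit (set (reg 90) (reg 80)) after P and
   rewrite X as (set (reg 70) (reg 90)), eliminating the load inside
   the loop.  */
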
static void
cse_set_around_loop (x, insn, loop_start)
     rtx x;
     rtx insn;
     rtx loop_start;
{
  struct table_elt *src_elt;

  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
     are setting PC or CC0 or whose SET_SRC is already a register.  */
  if (GET_CODE (x) == SET
      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
      && GET_CODE (SET_SRC (x)) != REG)
    {
      src_elt = lookup (SET_SRC (x),
			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
			GET_MODE (SET_DEST (x)));

      if (src_elt)
	for (src_elt = src_elt->first_same_value; src_elt;
	     src_elt = src_elt->next_same_value)
	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
	      && COST (src_elt->exp) < COST (SET_SRC (x)))
	    {
	      rtx p, set;

	      /* Look for an insn in front of LOOP_START that sets
		 something in the desired mode to SET_SRC (x) before we hit
		 a label or CALL_INSN.  */

	      for (p = prev_nonnote_insn (loop_start);
		   p && GET_CODE (p) != CALL_INSN
		   && GET_CODE (p) != CODE_LABEL;
		   p = prev_nonnote_insn (p))
		if ((set = single_set (p)) != 0
		    && GET_CODE (SET_DEST (set)) == REG
		    && GET_MODE (SET_DEST (set)) == src_elt->mode
		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
		  {
		    /* We now have to ensure that nothing between P
		       and LOOP_START modified anything referenced in
		       SET_SRC (x).  We know that nothing within the loop
		       can modify it, or we would have invalidated it in
		       the hash table.  */
		    rtx q;

		    cse_check_loop_start_value = SET_SRC (x);
		    for (q = p; q != loop_start; q = NEXT_INSN (q))
		      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
			note_stores (PATTERN (q), cse_check_loop_start);

		    /* If nothing was changed and we can replace our
		       SET_SRC, add an insn after P to copy its destination
		       to what we will be replacing SET_SRC with.  */
		    if (cse_check_loop_start_value
			&& validate_change (insn, &SET_SRC (x),
					    src_elt->exp, 0))
		      {
			/* If this creates new pseudos, this is unsafe,
			   because the regno of a new pseudo is unsuitable
			   for indexing into reg_qty when cse_insn processes
			   the new insn.  Therefore, if a new pseudo was
			   created, discard this optimization.  */
			int nregs = max_reg_num ();
			rtx move
			  = gen_move_insn (src_elt->exp, SET_DEST (set));
			if (nregs != max_reg_num ())
			  {
			    if (! validate_change (insn, &SET_SRC (x),
						   SET_SRC (set), 0))
			      abort ();
			  }
			else
			  emit_insn_after (move, p);
		      }
		    break;
		  }
	    }
    }

  /* Now invalidate anything modified by X.  */
  note_mem_written (SET_DEST (x));

  /* See comment on similar code in cse_insn for explanation of these tests.  */
  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
      || GET_CODE (SET_DEST (x)) == MEM)
    invalidate (SET_DEST (x), VOIDmode);
  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
}

/* Find the end of INSN's basic block and return its range,
   the total number of SETs in all the insns of the block, the last insn of the
   block, and the branch path.

   The branch path indicates which branches should be followed.  If a non-zero
   path size is specified, the block should be rescanned and a different set
   of branches will be taken.  The branch path is only used if
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.

   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
   used to describe the block.  It is filled in with the information about
   the current block.  The incoming structure's branch path, if any, is used
   to construct the output branch path.  */

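/* The successive branch paths behave like a backwards counter: each
   rescan flips the last branch that was TAKEN (or AROUND) to NOT_TAKEN
   and drops any trailing NOT_TAKEN entries.  Ignoring branches newly
   discovered during a rescan, a block with two followed branches is
   explored roughly as

     scan 1:  TAKEN      TAKEN
     scan 2:  TAKEN      NOT_TAKEN
     scan 3:  NOT_TAKEN		(all NOT_TAKEN, so no further rescan)  */
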
void
cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
     rtx insn;
     struct cse_basic_block_data *data;
     int follow_jumps;
     int after_loop;
     int skip_blocks;
{
  rtx p = insn, q;
  int nsets = 0;
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
  rtx next = (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	      ? insn : next_real_insn (insn));
  int path_size = data->path_size;
  int path_entry = 0;
  int i;

  /* Update the previous branch path, if any.  If the last branch was
     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
     shorten the path by one and look at the previous branch.  We know that
     at least one branch must have been taken if PATH_SIZE is non-zero.  */
  while (path_size > 0)
    {
      if (data->path[path_size - 1].status != NOT_TAKEN)
	{
	  data->path[path_size - 1].status = NOT_TAKEN;
	  break;
	}
      else
	path_size--;
    }

  /* Scan to end of this basic block.  */
  while (p && GET_CODE (p) != CODE_LABEL)
    {
      /* Don't cse out the end of a loop.  This makes a difference
	 only for the unusual loops that always execute at least once;
	 all other loops have labels there so we will stop in any case.
	 Cse'ing out the end of the loop is dangerous because it
	 might cause an invariant expression inside the loop
	 to be reused after the end of the loop.  This would make it
	 hard to move the expression out of the loop in loop.c,
	 especially if it is one of several equivalent expressions
	 and loop.c would like to eliminate it.

	 If we are running after loop.c has finished, we can ignore
	 the NOTE_INSN_LOOP_END.  */

      if (! after_loop && GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
	break;

      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
	 the regs restored by the longjmp come from
	 a later time than the setjmp.  */
      if (GET_CODE (p) == NOTE
	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
	break;

      /* A PARALLEL can have lots of SETs in it,
	 especially if it is really an ASM_OPERANDS.  */
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && GET_CODE (PATTERN (p)) == PARALLEL)
	nsets += XVECLEN (PATTERN (p), 0);
      else if (GET_CODE (p) != NOTE)
	nsets += 1;

      /* Ignore insns made by CSE; they cannot affect the boundaries of
	 the basic block.  */

      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
	high_cuid = INSN_CUID (p);
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
	low_cuid = INSN_CUID (p);

      /* See if this insn is in our branch path.  If it is and we are to
	 take it, do so.  */
      if (path_entry < path_size && data->path[path_entry].branch == p)
	{
	  if (data->path[path_entry].status != NOT_TAKEN)
	    p = JUMP_LABEL (p);

	  /* Point to next entry in path, if any.  */
	  path_entry++;
	}

      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
	 was specified, we haven't reached our maximum path length, there are
	 insns following the target of the jump, this is the only use of the
	 jump label, and the target label is preceded by a BARRIER.

	 Alternatively, we can follow the jump if it branches around a
	 block of code and there are no other branches into the block.
	 In this case invalidate_skipped_block will be called to invalidate any
	 registers set in the block when following the jump.  */

      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
	       && GET_CODE (p) == JUMP_INSN
	       && GET_CODE (PATTERN (p)) == SET
	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
	       && JUMP_LABEL (p) != 0
	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
	{
	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
	    if ((GET_CODE (q) != NOTE
		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
	      break;

	  /* If we ran into a BARRIER, this code is an extension of the
	     basic block when the branch is taken.  */
	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
	    {
	      /* Don't allow ourselves to keep walking around an
		 always-executed loop.  */
	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      /* Similarly, don't put a branch in our path more than once.  */
	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      data->path[path_entry].branch = p;
	      data->path[path_entry++].status = TAKEN;

	      /* This branch now ends our path.  It was possible that we
		 didn't see this branch the last time around (when the
		 insn in front of the target was a JUMP_INSN that was
		 turned into a no-op).  */
	      path_size = path_entry;

	      p = JUMP_LABEL (p);
	      /* Mark block so we won't scan it again later.  */
	      PUT_MODE (NEXT_INSN (p), QImode);
	    }
	  /* Detect a branch around a block of code.  */
	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
	    {
	      register rtx tmp;

	      if (next_real_insn (q) == next)
		{
		  p = NEXT_INSN (p);
		  continue;
		}

	      for (i = 0; i < path_entry; i++)
		if (data->path[i].branch == p)
		  break;

	      if (i != path_entry)
		break;

	      /* This is no_labels_between_p (p, q) with an added check for
		 reaching the end of a function (in case Q precedes P).  */
	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
		if (GET_CODE (tmp) == CODE_LABEL)
		  break;

	      if (tmp == q)
		{
		  data->path[path_entry].branch = p;
		  data->path[path_entry++].status = AROUND;

		  path_size = path_entry;

		  p = JUMP_LABEL (p);
		  /* Mark block so we won't scan it again later.  */
		  PUT_MODE (NEXT_INSN (p), QImode);
		}
	    }
	}
      p = NEXT_INSN (p);
    }

  data->low_cuid = low_cuid;
  data->high_cuid = high_cuid;
  data->nsets = nsets;
  data->last = p;

  /* If no jump in the path was taken, set our path length to zero
     so a rescan won't be done.  */
  for (i = path_size - 1; i >= 0; i--)
    if (data->path[i].status != NOT_TAKEN)
      break;

  if (i == -1)
    data->path_size = 0;
  else
    data->path_size = path_size;

  /* End the current branch path.  */
  data->path[path_size].branch = 0;
}

/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

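/* A rough sketch of how this entry point is typically driven from
   rest_of_compilation in toplev.c (the exact variable names there may
   differ):

     tem = cse_main (insns, max_reg_num (), 0, cse_dump_file);
     if (tem)
       ... rerun jump_optimize to delete newly unreachable code ...  */
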
int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  register rtx insn = f;
  register int i;

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();
  init_alias_analysis ();

  max_reg = nregs;

  max_insn_uid = get_max_uid ();

  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));

#ifdef LOAD_EXTEND_OP

  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
     and change the code and mode as appropriate.  */
  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif

  /* Discard all the free elements of the previous function
     since they are allocated in the temporary obstack.  */
  bzero ((char *) table, sizeof table);
  free_element_chain = 0;
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

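  /* For example (uids invented for illustration), given the insn stream

       uid 7	insn
       uid 12	line-number note
       uid 9	insn

     the loop below assigns cuids 1, 1, 2: the note shares the cuid of
     the preceding insn, so adding or removing line notes (as -g does)
     leaves the cuid distance between real insns unchanged.  */
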
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
	  || NOTE_LINE_NUMBER (insn) < 0)
	INSN_CUID (insn) = ++i;
      else
	/* Give a line number note the same cuid as preceding insn.  */
	INSN_CUID (insn) = i;
    }

  /* Initialize which registers are clobbered by calls.  */

  CLEAR_HARD_REG_SET (regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if ((call_used_regs[i]
	 /* Used to check !fixed_regs[i] here, but that isn't safe;
	    fixed regs are still call-clobbered, and sched can get
	    confused if they can "live across calls".

	    The frame pointer is always preserved across calls.  The arg
	    pointer is if it is fixed.  The stack pointer usually is, unless
	    RETURN_POPS_ARGS, in which case an explicit CLOBBER
	    will be present.  If we are generating PIC code, the PIC offset
	    table register is preserved across calls.  */

	 && i != STACK_POINTER_REGNUM
	 && i != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
	 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
	 )
	|| global_regs[i])
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
			      flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
	{
	  PUT_MODE (insn, VOIDmode);
	  insn = (val.last ? NEXT_INSN (val.last) : 0);
	  val.path_size = 0;
	  continue;
	}

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
		 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
	 past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
	max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
	 (see `cse_end_of_basic_block'), we reprocess the code from the start.
	 Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
	cse_basic_block (insn, val.last, val.path, 0);
      else
	{
	  int old_cse_jumps_altered = cse_jumps_altered;
	  rtx temp;

	  /* When cse changes a conditional jump to an unconditional
	     jump, we want to reprocess the block, since it will give
	     us a new branch path to investigate.  */
	  cse_jumps_altered = 0;
	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
	  if (cse_jumps_altered == 0
	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
	    insn = temp;

	  cse_jumps_altered |= old_cse_jumps_altered;
	}

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  /* Tell refers_to_mem_p that qty_const info is not available.  */
  qty_const = 0;

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  return cse_jumps_altered || recorded_label_ref;
}

/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     register rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  register rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* Each of these arrays is undefined before max_reg, so only allocate
     the space actually needed and adjust the start below.  */

  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg)
					   * sizeof (enum machine_mode));
  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
  qty_comparison_code
    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));

  qty_first_reg -= max_reg;
  qty_last_reg -= max_reg;
  qty_mode -= max_reg;
  qty_const -= max_reg;
  qty_const_insn -= max_reg;
  qty_comparison_code -= max_reg;
  qty_comparison_qty -= max_reg;
  qty_comparison_const -= max_reg;
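
  /* After the adjustments above, the first valid element of each array is
     at index MAX_REG; e.g. qty_const[max_reg] is slot 0 of the block just
     allocated for qty_const.  Indices below MAX_REG are never used, so no
     space is wasted on them.  */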

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      register enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different from code
	 generated with -O but not -g.

	 ??? This is a real kludge and needs to be done some other way.
	 Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
	{
	  flush_hash_table ();
	  num_insns = 0;
	}

      /* See if this is a branch that is part of the path.  If so, and it is
	 to be taken, do so.  */
      if (next_branch->branch == insn)
	{
	  enum taken status = next_branch++->status;
	  if (status != NOT_TAKEN)
	    {
	      if (status == TAKEN)
		record_jump_equiv (insn, 1);
	      else
		invalidate_skipped_block (NEXT_INSN (insn));

	      /* Set the last insn as the jump insn; it doesn't affect cc0.
		 Then follow this branch.  */
#ifdef HAVE_cc0
	      prev_insn_cc0 = 0;
#endif
	      prev_insn = insn;
	      insn = JUMP_LABEL (insn);
	      continue;
	    }
	}

      if (GET_MODE (insn) == QImode)
	PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx p;

	  /* Process notes first so we have all notes in canonical forms when
	     looking for duplicate operations.  */

	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

	  /* Track when we are inside a LIBCALL block.  Inside such a block,
	     we do not want to record destinations.  The last insn of a
	     LIBCALL block is not considered to be part of the block, since
	     its destination is the result of the block and hence should be
	     recorded.  */

	  if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
	    libcall_insn = XEXP (p, 0);
	  else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    libcall_insn = NULL_RTX;

	  cse_insn (insn, libcall_insn);
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (simplejump_p (insn))
	{
	  if (to == 0)
	    return 0;

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    return 0;

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    return insn;

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
      && JUMP_LABEL (PREV_INSN (to)) != 0
      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));

  return to ? NEXT_INSN (to) : 0;
}

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

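/* For example (register numbers made up), counting usages in

     (set (reg 5) (plus:SI (reg 5) (reg 6)))

   with DEST == (reg 5) increments counts[6] but not counts[5]: the use
   of reg 5 in the source does not keep reg 5 live, so the insn can still
   be deleted if nothing else reads reg 5.  */
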
static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
	 usage of SET_DEST inside SRC counts.

	 ??? Strictly speaking, we might be preserving this insn
	 because some other SET has side-effects, but that's hard
	 to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG
	      && GET_CODE (XEXP (x, 0)) == USE))
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

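/* For example (register number invented), once cse has propagated the
   constant out of

     (set (reg 64) (const_int 42))

   every later use of reg 64 may have been replaced, leaving its usage
   count at zero.  The insn has no side effects, so the backward scan
   below deletes it and decrements the usage counts of any registers it
   referenced.  */
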
void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts = (int *) alloca (nreg * sizeof (int));
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  bzero ((char *) counts, sizeof (int) * nreg);
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.  */
  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      if (set
		  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
		{
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
	    ;

#ifdef HAVE_cc0
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;
#endif
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn))))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if (GET_CODE (SET_DEST (elt)) == REG
		    && SET_DEST (elt) == SET_SRC (elt))
		  ;

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt)))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	live_insn = 1;

      /* If this is a dead insn, delete it and show registers in it aren't
	 being used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }
}