/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "splay-tree.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and several vectors together
   with "quantity numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_...'
   variables should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  `qty_first_reg', `qty_last_reg',
   `reg_next_eqv' and `reg_prev_eqv' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the quantity's `qty_mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate element of qty_const.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate element
   of qty_const.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and `qty_first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
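
/* As an illustration of the scheme above, here is a minimal sketch
   (not part of the pass, and using hypothetical names) of how the
   quantity numbers evolve over a two-insn basic block with
   max_reg taken to be 4.  */
#if 0
static int sketch_reg_qty[16];
static int sketch_next_qty;

static void
sketch_quantities ()
{
  int i;

  /* new_basic_block: every register starts as its own (invalid)
     quantity; reg_qty[N] == N means N has no real quantity yet.
     Real quantity numbers start at max_reg.  */
  sketch_next_qty = 4;
  for (i = 0; i < 4; i++)
    sketch_reg_qty[i] = i;

  /* (set (reg 2) (plus ...)): reg 2 is loaded some new way, so it
     gets a fresh quantity number, as make_new_qty does.  */
  sketch_reg_qty[2] = sketch_next_qty++;

  /* (set (reg 3) (reg 2)): a register copy, so the quantity number
     is copied, as make_regs_eqv does; 2 and 3 are now equivalent.  */
  sketch_reg_qty[3] = sketch_reg_qty[2];
}
#endif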

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of vectors indexed by quantity number.
   We know in advance we will not need a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Indexed by quantity number, gives the first (or last) register
   in the chain of registers that currently contain this quantity.  */

static int *qty_first_reg;
static int *qty_last_reg;

/* Indexed by quantity number, gives the mode of the quantity.  */

static enum machine_mode *qty_mode;

/* Indexed by quantity number, gives the rtx of the constant value of the
   quantity, or zero if it does not have a known value.
   A sum of the frame pointer (or arg pointer) plus a constant
   can also be entered here.  */

static rtx *qty_const;

/* Indexed by qty number, gives the insn that stored the constant value
   recorded in `qty_const'.  */

static rtx *qty_const_insn;

/* The next three variables are used to track when a comparison between a
   quantity and some constant or register has been seen.  In that case, we
   know the results of the comparison in case we see it again.  These variables
   record a comparison that is known to be true.  */

/* Indexed by qty number, gives the rtx code of a comparison with a known
   result involving this quantity.  If none, it is UNKNOWN.  */
static enum rtx_code *qty_comparison_code;

/* Indexed by qty number, gives the constant being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a constant, it is zero.  */

static rtx *qty_comparison_const;

/* Indexed by qty number, gives the quantity being compared against in a
   comparison of known result.  If no such comparison, it is undefined.
   If the comparison is not with a register, it is -1.  */

static int *qty_comparison_qty;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_next_eqv[N] is undefined.  */

static int *reg_next_eqv;
static int *reg_prev_eqv;

struct cse_reg_info {
  union {
    /* The number of times the register has been altered in the current
       basic block.  */
    int reg_tick;

    /* The next cse_reg_info structure in the free list.  */
    struct cse_reg_info* next;
  } variant;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The quantity number of the register's current contents.  */
  int reg_qty;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A mapping from registers to cse_reg_info data structures.  */
static splay_tree cse_reg_info_tree;

/* The last lookup we did into the cse_reg_info_tree.  This allows us
   to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* canon_hash stores 1 in hash_arg_in_struct
   if it notices a reference to memory that's part of a structure.  */

static int hash_arg_in_struct;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `in_struct' field is nonzero for elements that
   involve any reference to memory inside a structure or array.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */


struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char in_struct;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define NBUCKETS 31

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 (GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) % NBUCKETS	\
  : canon_hash (X, M) % NBUCKETS)
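
/* For instance, the lookup paths below pair HASH with `lookup' roughly
   as in this sketch (illustrative only; after a canon_hash-based hash,
   callers must also check `do_not_record'):  */
#if 0
  unsigned hash = HASH (x, GET_MODE (x));
  struct table_elt *elt = lookup (x, hash, GET_MODE (x));
#endif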

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed,
   but not if it is an overlapping register.  */
#ifdef OVERLAPPING_REGNO_P
#define FIXED_REGNO_P(N)  \
  (((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
    || fixed_regs[N] || global_regs[N])	  \
   && ! OVERLAPPING_REGNO_P ((N)))
#else
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])
#endif

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of 1 and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER)	\
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)								\
  (GET_CODE (X) == REG							\
   ? (CHEAP_REG (X) ? 0							\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1				\
      : 2)								\
   : notreg_cost (X))
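
/* The resulting ordering: fixed hard regs and frame pointers cost 0,
   pseudos cost 1, other hard regs cost 2, and anything else costs
   whatever notreg_cost derives from rtx_cost.  A sketch (illustrative
   only) of how a caller might keep the cheaper of two equivalent
   expressions:  */
#if 0
  rtx cheaper = COST (exp1) <= COST (exp2) ? exp1 : exp2;
#endif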

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) 			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->variant.reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the `qty_...' variables.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX)					\
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0)))	\
   ? -1 : ADDRESS_COST (RTX))
#endif

static struct table_elt *table[NBUCKETS];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data {
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path {
    /* The branch insn.  */
    rtx branch;
    /* Whether it should be taken or not.  AROUND is the same as taken
       except that it is used when the destination label is not preceded
       by a BARRIER.  */
    enum taken {TAKEN, NOT_TAKEN, AROUND} status;
  } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == arg_pointer_rtx					\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
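
/* For instance, the shared rtx for the frame pointer,
   (plus (reg arg_pointer) (const_int 8)), and (addressof (reg)) all
   satisfy FIXED_BASE_PLUS_P, while (plus (reg 42) (const_int 8)) does
   not, since a pseudo register may vary.  */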

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || XEXP (X, 0) == arg_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PROTO((rtx));
static void new_basic_block	PROTO((void));
static void make_new_qty	PROTO((int));
static void make_regs_eqv	PROTO((int, int));
static void delete_reg_equiv	PROTO((int));
static int mention_regs		PROTO((rtx));
static int insert_regs		PROTO((rtx, struct table_elt *, int));
static void free_element	PROTO((struct table_elt *));
static void remove_from_table	PROTO((struct table_elt *, unsigned));
static struct table_elt *get_element PROTO((void));
static struct table_elt *lookup	PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate		PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p	PROTO((rtx));
static void remove_invalid_refs	PROTO((int));
static void remove_invalid_subreg_refs	PROTO((int, int, enum machine_mode));
static void rehash_using_reg	PROTO((rtx));
static void invalidate_memory	PROTO((void));
static void invalidate_for_call	PROTO((void));
static rtx use_related_value	PROTO((rtx, struct table_elt *));
static unsigned canon_hash	PROTO((rtx, enum machine_mode));
static unsigned safe_hash	PROTO((rtx, enum machine_mode));
static int exp_equiv_p		PROTO((rtx, rtx, int, int));
static void set_nonvarying_address_components PROTO((rtx, int, rtx *,
						     HOST_WIDE_INT *,
						     HOST_WIDE_INT *));
static int refers_to_p		PROTO((rtx, rtx));
static rtx canon_reg		PROTO((rtx, rtx));
static void find_best_addr	PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx cse_gen_binary	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx simplify_plus_minus	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx fold_rtx		PROTO((rtx, rtx));
static rtx equiv_constant	PROTO((rtx));
static void record_jump_equiv	PROTO((rtx, int));
static void record_jump_cond	PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx, int));
static void cse_insn		PROTO((rtx, rtx));
static int note_mem_written	PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes	PROTO((rtx, rtx));
static void cse_around_loop	PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx));
static void cse_set_around_loop	PROTO((rtx, rtx, rtx));
static rtx cse_basic_block	PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PROTO((rtx, int *, rtx, int));
extern void dump_class          PROTO((struct table_elt*));
static void check_fold_consts	PROTO((PTR));
static struct cse_reg_info* get_cse_reg_info PROTO((int));
static void free_cse_reg_info   PROTO((splay_tree_value));
static void flush_hash_table	PROTO((void));

extern int rtx_equal_function_value_matters;

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
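
/* dump_class is not called from within the compiler itself; it is
   meant to be invoked from a debugger, e.g. under gdb something like
   "call dump_class (elt)" while stopped inside cse_insn.  */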

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
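
/* For example, COSTS_N_INSNS (1) == 2, matching the default cost of a
   simple operation in rtx_cost below, while COSTS_N_INSNS (5) == 18
   and COSTS_N_INSNS (7) == 26 are the default multiply and divide
   costs.  */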

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
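
      /* The target macros below expand to `case' labels with their own
	 return statements, so although they follow an unconditional
	 return, they are reachable: control switches directly to one
	 of those labels.  */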
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
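
/* As a worked example: in the absence of RTX_COSTS and CONST_COSTS
   overrides, rtx_cost on (plus:SI (reg:SI <pseudo>) (const_int 4))
   takes the default total of 2 for the PLUS, then adds 1 for the
   pseudo register and 2 for the CONST_INT, returning 5.  */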

static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info *cri;
  splay_tree_node n;

  /* See if we already have this entry.  */
  n = splay_tree_lookup (cse_reg_info_tree,
			 (splay_tree_key) regno);
  if (n)
    cri = (struct cse_reg_info *) (n->value);
  else
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  cri = cse_reg_info_free_list;
	  cse_reg_info_free_list = cri->variant.next;
	}
      else
	cri = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Initialize it.  */
      cri->variant.reg_tick = 0;
      cri->reg_in_table = -1;
      cri->reg_qty = regno;

      splay_tree_insert (cse_reg_info_tree,
			 (splay_tree_key) regno,
			 (splay_tree_value) cri);
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = cri;

  return cri;
}

static void
free_cse_reg_info (v)
     splay_tree_value v;
{
  struct cse_reg_info *cri = (struct cse_reg_info *) v;

  cri->variant.next = cse_reg_info_free_list;
  cse_reg_info_free_list = cri;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  if (cse_reg_info_tree)
    {
      splay_tree_delete (cse_reg_info_tree);
      cached_cse_reg_info = 0;
    }

  cse_reg_info_tree = splay_tree_new (splay_tree_compare_ints, 0,
				      free_cse_reg_info);

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *this, *next;
      for (this = table[i]; this; this = next)
	{
	  next = this->next_same_hash;
	  free_element (this);
	}
    }

  bzero ((char *) table, sizeof table);

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity not in any register before
   and initialize that quantity.  */

static void
make_new_qty (reg)
     register int reg;
{
  register int q;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  qty_first_reg[q] = reg;
  qty_last_reg[q] = reg;
  qty_const[q] = qty_const_insn[q] = 0;
  qty_comparison_code[q] = UNKNOWN;

  reg_next_eqv[reg] = reg_prev_eqv[reg] = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = qty_first_reg[q];
  lastr = qty_last_reg[q];

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_prev_eqv[firstr] = new;
      reg_next_eqv[new] = firstr;
      reg_prev_eqv[new] = -1;
      qty_first_reg[q] = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_prev_eqv[lastr] >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_prev_eqv[lastr];
      reg_next_eqv[new] = reg_next_eqv[lastr];
      if (reg_next_eqv[lastr] >= 0)
	reg_prev_eqv[reg_next_eqv[lastr]] = new;
      else
	qty_last_reg[q] = new;
      reg_next_eqv[lastr] = new;
      reg_prev_eqv[new] = lastr;
    }
}
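
/* For example, if pseudo 5 already heads a quantity's chain and pseudo 7
   is copied from it, make_regs_eqv (7, 5) typically appends 7 at the end,
   leaving qty_first_reg[q] == 5, qty_last_reg[q] == 7, reg_next_eqv[5] == 7
   and reg_prev_eqv[7] == 5.  Only if 7's lifespan extends beyond the
   current basic block and past 5's does 7 become the new head, i.e. the
   preferred replacement.  */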

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  p = reg_prev_eqv[reg];
  n = reg_next_eqv[reg];

  if (n != -1)
    reg_prev_eqv[n] = p;
  else
    qty_last_reg[q] = p;
  if (p != -1)
    reg_next_eqv[p] = n;
  else
    qty_first_reg[q] = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      if (REGNO_QTY_VALID_P (regno)
	  && qty_mode[REG_QTY (regno)] != GET_MODE (x))
	return 0;

      if (modified || ! REGNO_QTY_VALID_P (regno))
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno);
	  qty_mode[REG_QTY (regno)] = GET_MODE (x);
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Put the element ELT on the list of free elements.  */

static void
free_element (elt)
     struct table_elt *elt;
{
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Return an element that is free for use.  */

static struct table_elt *
get_element ()
{
  struct table_elt *elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
      return elt;
    }
  n_elements_made++;
  return (struct table_elt *) oballoc (sizeof (struct table_elt));
}

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < NBUCKETS; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  free_element (elt);
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) % NBUCKETS, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
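
/* A typical use, found in several places later in this file, is to ask
   whether X is known to be equivalent to some expression of a given
   code, e.g. roughly:  */
#if 0
  rtx y = lookup_as_function (x, CONST_INT);
  if (y != 0)
    /* X's class contains a CONST_INT; Y can be used in place of X.  */ ;
#endif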
1423
1424/* Insert X in the hash table, assuming HASH is its hash code
1425   and CLASSP is an element of the class it should go in
1426   (or 0 if a new class should be made).
1427   It is inserted at the proper position to keep the class in
1428   the order cheapest first.
1429
1430   MODE is the machine-mode of X, or if X is an integer constant
1431   with VOIDmode then MODE is the mode with which X will be used.
1432
1433   For elements of equal cheapness, the most recent one
1434   goes in front, except that the first element in the list
1435   remains first unless a cheaper element is added.  The order of
1436   pseudo-registers does not matter, as canon_reg will be called to
1437   find the cheapest when a register is retrieved from the table.
1438
1439   The in_memory field in the hash table element is set to 0.
1440   The caller must set it nonzero if appropriate.
1441
1442   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1443   and if insert_regs returns a nonzero value
1444   you must then recompute its hash code before calling here.
1445
1446   If necessary, update table showing constant values of quantities.  */
1447
1448#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)
1449
1450static struct table_elt *
1451insert (x, classp, hash, mode)
1452     register rtx x;
1453     register struct table_elt *classp;
1454     unsigned hash;
1455     enum machine_mode mode;
1456{
1457  register struct table_elt *elt;
1458
1459  /* If X is a register and we haven't made a quantity for it,
1460     something is wrong.  */
1461  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1462    abort ();
1463
1464  /* If X is a hard register, show it is being put in the table.  */
1465  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1466    {
1467      int regno = REGNO (x);
1468      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1469      int i;
1470
1471      for (i = regno; i < endregno; i++)
1472	    SET_HARD_REG_BIT (hard_regs_in_table, i);
1473    }
1474
1475  /* If X is a label, show we recorded it.  */
1476  if (GET_CODE (x) == LABEL_REF
1477      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
1478	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
1479    recorded_label_ref = 1;
1480
1481  /* Put an element for X into the right hash bucket.  */
1482
1483  elt = get_element ();
1484  elt->exp = x;
1485  elt->cost = COST (x);
1486  elt->next_same_value = 0;
1487  elt->prev_same_value = 0;
1488  elt->next_same_hash = table[hash];
1489  elt->prev_same_hash = 0;
1490  elt->related_value = 0;
1491  elt->in_memory = 0;
1492  elt->mode = mode;
1493  elt->is_const = (CONSTANT_P (x)
1494		   /* GNU C++ takes advantage of this for `this'
1495		      (and other const values).  */
1496		   || (RTX_UNCHANGING_P (x)
1497		       && GET_CODE (x) == REG
1498		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1499		   || FIXED_BASE_PLUS_P (x));
1500
1501  if (table[hash])
1502    table[hash]->prev_same_hash = elt;
1503  table[hash] = elt;
1504
1505  /* Put it into the proper value-class.  */
1506  if (classp)
1507    {
1508      classp = classp->first_same_value;
1509      if (CHEAPER (elt, classp))
1510	/* Insert at the head of the class */
1511	{
1512	  register struct table_elt *p;
1513	  elt->next_same_value = classp;
1514	  classp->prev_same_value = elt;
1515	  elt->first_same_value = elt;
1516
1517	  for (p = classp; p; p = p->next_same_value)
1518	    p->first_same_value = elt;
1519	}
1520      else
1521	{
1522	  /* Insert not at head of the class.  */
1523	  /* Put it after the last element cheaper than X.  */
1524	  register struct table_elt *p, *next;
1525	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1526	       p = next);
1527	  /* Put it after P and before NEXT.  */
1528	  elt->next_same_value = next;
1529	  if (next)
1530	    next->prev_same_value = elt;
1531	  elt->prev_same_value = p;
1532	  p->next_same_value = elt;
1533	  elt->first_same_value = classp;
1534	}
1535    }
1536  else
1537    elt->first_same_value = elt;
1538
1539  /* If this is a constant being set equivalent to a register or a register
1540     being set equivalent to a constant, note the constant equivalence.
1541
1542     If this is a constant, it cannot be equivalent to a different constant,
1543     and a constant is the only thing that can be cheaper than a register.  So
1544     we know the register is the head of the class (before the constant was
1545     inserted).
1546
1547     If this is a register that is not already known equivalent to a
1548     constant, we must check the entire class.
1549
1550     If this is a register that is already known equivalent to an insn,
1551     update `qty_const_insn' to show that `this_insn' is the latest
1552     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      qty_const[REG_QTY (REGNO (classp->exp))]
        = gen_lowpart_if_possible (qty_mode[REG_QTY (REGNO (classp->exp))], x);
      qty_const_insn[REG_QTY (REGNO (classp->exp))] = this_insn;
    }

  else if (GET_CODE (x) == REG && classp && ! qty_const[REG_QTY (REGNO (x))]
           && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
        {
          if (p->is_const && GET_CODE (p->exp) != REG)
            {
              qty_const[REG_QTY (REGNO (x))]
                = gen_lowpart_if_possible (GET_MODE (x), p->exp);
              qty_const_insn[REG_QTY (REGNO (x))] = this_insn;
              break;
            }
        }
    }

  else if (GET_CODE (x) == REG && qty_const[REG_QTY (REGNO (x))]
           && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))])
    qty_const_insn[REG_QTY (REGNO (x))] = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
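  /* For instance (hypothetical RTL): (const (plus (symbol_ref "tbl")
     (const_int 8))) has the related subexpression (symbol_ref "tbl");
     all constants of the form tbl+N end up on one circular
     related_value chain rooted at the entry for tbl.  */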
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
        {
          /* Get the integer-free subexpression in the hash table.  */
          subhash = safe_hash (subexp, mode) % NBUCKETS;
          subelt = lookup (subexp, subhash, mode);
          if (subelt == 0)
            subelt = insert (subexp, NULL_PTR, subhash, mode);
          /* Initialize SUBELT's circular chain if it has none.  */
          if (subelt->related_value == 0)
            subelt->related_value = subelt;
          /* Find the element in the circular chain that precedes SUBELT.  */
          subelt_prev = subelt;
          while (subelt_prev->related_value != subelt)
            subelt_prev = subelt_prev->related_value;
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
             This way the element that follows SUBELT is the oldest one.  */
          elt->related_value = subelt_prev->related_value;
          subelt_prev->related_value = elt;
        }
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */
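
/* Illustrative use (assumed): if a conditional jump proves that the
   values in two classes are equal, the jump-recording code can call
   merge_equiv_classes so that later lookups see a single class.  */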

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
         Don't do this for invalid entries as we cannot find their
         hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
        {
          hash_arg_in_memory = 0;
          hash_arg_in_struct = 0;
          hash = HASH (exp, mode);

          if (GET_CODE (exp) == REG)
            delete_reg_equiv (REGNO (exp));

          remove_from_table (elt, hash);

          if (insert_regs (exp, class1, 0))
            {
              rehash_using_reg (exp);
              hash = HASH (exp, mode);
            }
          new = insert (exp, class1, hash, mode);
          new->in_memory = hash_arg_in_memory;
          new->in_struct = hash_arg_in_struct;
        }
    }
}


/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = table[i])
      {
        /* Note that invalidate can remove elements
           after P in the current hash chain.  */
        if (GET_CODE (p->exp) == REG)
          invalidate (p->exp, p->mode);
        else
          remove_from_table (p, i);
      }
}


/* Remove from the hash table, or mark as invalid,
   all expressions whose values could be altered by storing in X.
   X is a register, a subreg, or a memory reference with nonvarying address
   (because, when a memory reference with a varying address is stored in,
   all memory references are removed by invalidate_memory
   so specific invalidation is superfluous).
   FULL_MODE, if not VOIDmode, indicates that this much should be invalidated
   instead of just the amount indicated by the mode of X.  This is only used
   for bitfield stores into memory.

   A nonvarying address may be just a register or just
   a symbol reference, or it may be either of those plus
   a numeric offset.  */
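
/* Illustrative cases (hypothetical RTL): invalidating (reg:SI 5) bumps
   REG_TICK (5) and drops r5's quantity, while invalidating
   (mem:SI (plus (reg fp) (const_int 8))) scans every bucket and removes
   entries whose recorded memory may overlap that stack slot.  */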

static void
invalidate (x, full_mode)
     rtx x;
     enum machine_mode full_mode;
{
  register int i;
  register struct table_elt *p;

  /* If X is a register, dependencies on its contents
     are recorded through the qty number mechanism.
     Just change the qty number of the register,
     mark it as invalid for expressions that refer to it,
     and remove it itself.  */

  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register unsigned hash = HASH (x, GET_MODE (x));

      /* Remove REGNO from any quantity list it might be on and indicate
         that its value might have changed.  If it is a pseudo, remove its
         entry from the hash table.

         For a hard register, we do the first two actions above for any
         additional hard registers corresponding to X.  Then, if any of these
         registers are in the table, we must remove any REG entries that
         overlap these registers.  */

      delete_reg_equiv (regno);
      REG_TICK (regno)++;

      if (regno >= FIRST_PSEUDO_REGISTER)
        {
          /* Because a register can be referenced in more than one mode,
             we might have to remove more than one table entry.  */

          struct table_elt *elt;

          while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
            remove_from_table (elt, hash);
        }
      else
        {
          HOST_WIDE_INT in_table
            = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
          int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
          int tregno, tendregno;
          register struct table_elt *p, *next;

          CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);

          for (i = regno + 1; i < endregno; i++)
            {
              in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
              CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
              delete_reg_equiv (i);
              REG_TICK (i)++;
            }

          if (in_table)
            for (hash = 0; hash < NBUCKETS; hash++)
              for (p = table[hash]; p; p = next)
                {
                  next = p->next_same_hash;

                  if (GET_CODE (p->exp) != REG
                      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
                    continue;

                  tregno = REGNO (p->exp);
                  tendregno
                    = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
                  if (tendregno > regno && tregno < endregno)
                    remove_from_table (p, hash);
                }
        }

      return;
    }

  if (GET_CODE (x) == SUBREG)
    {
      if (GET_CODE (SUBREG_REG (x)) != REG)
        abort ();
      invalidate (SUBREG_REG (x), VOIDmode);
      return;
    }

  /* If X is a parallel, invalidate all of its elements.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
        invalidate (XVECEXP (x, 0, i), VOIDmode);
      return;
    }

  /* If X is an expr_list, this is part of a disjoint return value;
     extract the location in question ignoring the offset.  */

  if (GET_CODE (x) == EXPR_LIST)
    {
      invalidate (XEXP (x, 0), VOIDmode);
      return;
    }

  /* X is not a register; it must be a memory reference with
     a nonvarying address.  Remove all hash table elements
     that refer to overlapping pieces of memory.  */

  if (GET_CODE (x) != MEM)
    abort ();

  if (full_mode == VOIDmode)
    full_mode = GET_MODE (x);

  for (i = 0; i < NBUCKETS; i++)
    {
      register struct table_elt *next;
      for (p = table[i]; p; p = next)
        {
          next = p->next_same_hash;
          /* Invalidate ASM_OPERANDS which reference memory (this is easier
             than checking all the aliases).  */
          if (p->in_memory
              && (GET_CODE (p->exp) != MEM
                  || true_dependence (x, full_mode, p->exp, cse_rtx_varies_p)))
            remove_from_table (p, i);
        }
    }
}

/* Remove all expressions that refer to register REGNO,
   since they are already invalid, and we are about to
   mark that register valid again and don't want the old
   expressions to reappear as valid.  */

static void
remove_invalid_refs (regno)
     int regno;
{
  register int i;
  register struct table_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (GET_CODE (p->exp) != REG
            && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
          remove_from_table (p, i);
      }
}

/* Likewise for a SUBREG of register REGNO, where the subreg starts at
   word WORD and has mode MODE.  */
static void
remove_invalid_subreg_refs (regno, word, mode)
     int regno;
     int word;
     enum machine_mode mode;
{
  register int i;
  register struct table_elt *p, *next;
  int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
        rtx exp;
        next = p->next_same_hash;

        exp = p->exp;
        if (GET_CODE (p->exp) != REG
            && (GET_CODE (exp) != SUBREG
                || GET_CODE (SUBREG_REG (exp)) != REG
                || REGNO (SUBREG_REG (exp)) != regno
                || (((SUBREG_WORD (exp)
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
                     >= word)
                    && SUBREG_WORD (exp) <= end))
            && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
          remove_from_table (p, i);
      }
}

/* Recompute the hash codes of any valid entries in the hash table that
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.

   This is called when we make a jump equivalence.  */
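
/* For example (hypothetical): once (reg 6) is shown equal to (reg 9)
   and acquires r9's quantity number, an entry for
   (plus (reg 6) (const_int 4)) hashes differently than before, so it
   must be moved to the bucket of its recomputed hash code.  */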

static void
rehash_using_reg (x)
     rtx x;
{
  unsigned int i;
  struct table_elt *p, *next;
  unsigned hash;

  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* If X is not a register or if the register is known not to be in any
     valid entries in the table, we have no work to do.  */

  if (GET_CODE (x) != REG
      || REG_IN_TABLE (REGNO (x)) < 0
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
    return;

  /* Scan all hash chains looking for valid entries that mention X.
     If we find one and it is in the wrong hash chain, move it.  We can skip
     objects that are registers, since they are handled specially.  */

  for (i = 0; i < NBUCKETS; i++)
    for (p = table[i]; p; p = next)
      {
        next = p->next_same_hash;
        if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
            && exp_equiv_p (p->exp, p->exp, 1, 0)
            && i != (hash = safe_hash (p->exp, p->mode) % NBUCKETS))
          {
            if (p->next_same_hash)
              p->next_same_hash->prev_same_hash = p->prev_same_hash;

            if (p->prev_same_hash)
              p->prev_same_hash->next_same_hash = p->next_same_hash;
            else
              table[i] = p->next_same_hash;

            p->next_same_hash = table[hash];
            p->prev_same_hash = 0;
            if (table[hash])
              table[hash]->prev_same_hash = p;
            table[hash] = p;
          }
      }
}

/* Remove from the hash table any expression that is a call-clobbered
   register.  Also update the TICK values of such registers.  */

static void
invalidate_for_call ()
{
  int regno, endregno;
  int i;
  unsigned hash;
  struct table_elt *p, *next;
  int in_table = 0;

  /* Go through all the hard registers.  For each that is clobbered in
     a CALL_INSN, remove the register from quantity chains and update
     reg_tick if defined.  Also see if any of these registers is currently
     in the table.  */

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
      {
        delete_reg_equiv (regno);
        if (REG_TICK (regno) >= 0)
          REG_TICK (regno)++;

        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
      }

  /* In the case where we have no call-clobbered hard registers in the
     table, we are done.  Otherwise, scan the table and remove any
     entry that overlaps a call-clobbered register.  */

  if (in_table)
    for (hash = 0; hash < NBUCKETS; hash++)
      for (p = table[hash]; p; p = next)
        {
          next = p->next_same_hash;

          if (p->in_memory)
            {
              remove_from_table (p, hash);
              continue;
            }

          if (GET_CODE (p->exp) != REG
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
            continue;

          regno = REGNO (p->exp);
          endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));

          for (i = regno; i < endregno; i++)
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
              {
                remove_from_table (p, hash);
                break;
              }
        }
}

/* Given an expression X of type CONST,
   and ELT which is its table entry (or 0 if it
   is not in the hash table),
   return an alternate expression for X as a register plus integer.
   If none can be found, return 0.  */
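
/* Worked example (assumed RTL): if X is (const (plus (symbol_ref "s")
   (const_int 8))) and the table records that s+4 lives in (reg 12),
   the result is (plus (reg 12) (const_int 4)), i.e. Q->exp plus the
   difference of the integer terms.  */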

static rtx
use_related_value (x, elt)
     rtx x;
     struct table_elt *elt;
{
  register struct table_elt *relt = 0;
  register struct table_elt *p, *q;
  HOST_WIDE_INT offset;

  /* First, is there anything related known?
     If we have a table element, we can tell from that.
     Otherwise, must look it up.  */

  if (elt != 0 && elt->related_value != 0)
    relt = elt;
  else if (elt == 0 && GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      if (subexp != 0)
        relt = lookup (subexp,
                       safe_hash (subexp, GET_MODE (subexp)) % NBUCKETS,
                       GET_MODE (subexp));
    }

  if (relt == 0)
    return 0;

  /* Search all related table entries for one that has an
     equivalent register.  */

  p = relt;
  while (1)
    {
      /* This loop is strange in that it is executed in two different cases.
         The first is when X is already in the table.  Then it is searching
         the RELATED_VALUE list of X's class (RELT).  The second case is when
         X is not in the table.  Then RELT points to a class for the related
         value.

         Ensure that, whatever case we are in, we ignore classes that have
         the same value as X.  */

      if (rtx_equal_p (x, p->exp))
        q = 0;
      else
        for (q = p->first_same_value; q; q = q->next_same_value)
          if (GET_CODE (q->exp) == REG)
            break;

      if (q)
        break;

      p = p->related_value;

      /* We went all the way around, so there is nothing to be found.
         Alternatively, perhaps RELT was in the table for some other reason
         and it has no related values recorded.  */
      if (p == relt || p == 0)
        break;
    }

  if (q == 0)
    return 0;

  offset = (get_integer_term (x) - get_integer_term (p->exp));
  /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
  return plus_constant (q->exp, offset);
}

/* Hash an rtx.  We are careful to make sure the value is never negative.
   Equivalent registers hash identically.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.

   Store 1 in do_not_record if any subexpression is volatile.

   Store 1 in hash_arg_in_memory if X contains a MEM rtx
   which does not have the RTX_UNCHANGING_P bit set.
   In this case, also store 1 in hash_arg_in_struct
   if there is a MEM rtx which has the MEM_IN_STRUCT_P bit set.

   Note that cse_insn knows that the hash code of a MEM expression
   is just (int) MEM plus the hash code of the address.  */
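
/* A small worked illustration (assumed, not from the original sources):
   hashing (reg:SI 42) contributes ((unsigned) REG << 7) plus the
   register's quantity number rather than its REGNO, which is what makes
   "equivalent registers hash identically" above literally true.  */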

static unsigned
canon_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  register int i, j;
  register unsigned hash = 0;
  register enum rtx_code code;
  register char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        register int regno = REGNO (x);

        /* On some machines, we can't record any non-fixed hard register,
           because extending its life will cause reload problems.  We
           consider ap, fp, and sp to be fixed for this purpose.

           We also consider CCmode registers to be fixed for this purpose;
           failure to do so leads to failure to simplify 0<100 type of
           conditionals.

           On all machines, we can't record any global registers.  */

        if (regno < FIRST_PSEUDO_REGISTER
            && (global_regs[regno]
                || (SMALL_REGISTER_CLASSES
                    && ! fixed_regs[regno]
                    && regno != FRAME_POINTER_REGNUM
                    && regno != HARD_FRAME_POINTER_REGNUM
                    && regno != ARG_POINTER_REGNUM
                    && regno != STACK_POINTER_REGNUM
                    && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
          {
            do_not_record = 1;
            return 0;
          }
        hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
        return hash;
      }

    /* We handle SUBREG of a REG specially because the underlying
       reg changes its hash value with every value change; we don't
       want to have to forget unrelated subregs when one subreg changes.  */
    case SUBREG:
      {
        if (GET_CODE (SUBREG_REG (x)) == REG)
          {
            hash += (((unsigned) SUBREG << 7)
                     + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
            return hash;
          }
        break;
      }

    case CONST_INT:
      {
        unsigned HOST_WIDE_INT tem = INTVAL (x);
        hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
        return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          {
            unsigned tem = XINT (x, i);
            hash += tem;
          }
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash;

    case SYMBOL_REF:
      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash;

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          do_not_record = 1;
          return 0;
        }
      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
        {
          hash_arg_in_memory = 1;
          if (MEM_IN_STRUCT_P (x))
            hash_arg_in_struct = 1;
        }
      /* Now that we have already found this special case,
         might as well speed it up as much as possible.  */
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      do_not_record = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          do_not_record = 1;
          return 0;
        }
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = tem;
              goto repeat;
            }
          hash += canon_hash (tem, 0);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          hash += canon_hash (XVECEXP (x, i, j), 0);
      else if (fmt[i] == 's')
        {
          register unsigned char *p = (unsigned char *) XSTR (x, i);
          if (p)
            while (*p)
              hash += *p++;
        }
      else if (fmt[i] == 'i')
        {
          register unsigned tem = XINT (x, i);
          hash += tem;
        }
      else if (fmt[i] == '0')
        /* unused */;
      else
        abort ();
    }
  return hash;
}

/* Like canon_hash but with no side effects.  */

static unsigned
safe_hash (x, mode)
     rtx x;
     enum machine_mode mode;
{
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  unsigned hash = canon_hash (x, mode);
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;
  do_not_record = save_do_not_record;
  return hash;
}

/* Return 1 iff X and Y would canonicalize into the same thing,
   without actually constructing the canonicalization of either one.
   If VALIDATE is nonzero,
   we assume X is an expression being processed from the rtl
   and Y was found in the hash table.  We check register refs
   in Y for being marked as valid.

   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
   that is known to be in the register.  Ordinarily, we don't allow them
   to match, because letting them match would cause unpredictable results
   in all the places that search a hash table chain for an equivalent
   for a given value.  A possible equivalent that has different structure
   has its hash code computed from different data.  Whether the hash code
   is the same as that of the given value is pure luck.  */
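
/* Example of the EQUAL_VALUES case (hypothetical): with EQUAL_VALUES
   nonzero, (const_int 4) can match (reg:SI 8) when r8's quantity is
   recorded in qty_const as (const_int 4) with matching mode; with
   EQUAL_VALUES zero the differing codes fail immediately.  */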

static int
exp_equiv_p (x, y, validate, equal_values)
     rtx x, y;
     int validate;
     int equal_values;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;

  /* Note: it is incorrect to assume an expression is equivalent to itself
     if VALIDATE is nonzero.  */
  if (x == y && !validate)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    {
      if (!equal_values)
        return 0;

      /* If X is a constant and Y is a register or vice versa, they may be
         equivalent.  We only have to validate if Y is a register.  */
      if (CONSTANT_P (x) && GET_CODE (y) == REG
          && REGNO_QTY_VALID_P (REGNO (y))
          && GET_MODE (y) == qty_mode[REG_QTY (REGNO (y))]
          && rtx_equal_p (x, qty_const[REG_QTY (REGNO (y))])
          && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
        return 1;

      if (CONSTANT_P (y) && code == REG
          && REGNO_QTY_VALID_P (REGNO (x))
          && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
          && rtx_equal_p (y, qty_const[REG_QTY (REGNO (x))]))
        return 1;

      return 0;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      {
        int regno = REGNO (y);
        int endregno
          = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
                     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
        int i;

        /* If the quantities are not the same, the expressions are not
           equivalent.  If they are the same and we are not to validate,
           they are equivalent.  Otherwise, ensure all regs are up-to-date.  */

        if (REG_QTY (REGNO (x)) != REG_QTY (regno))
          return 0;

        if (! validate)
          return 1;

        for (i = regno; i < endregno; i++)
          if (REG_IN_TABLE (i) != REG_TICK (i))
            return 0;

        return 1;
      }

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
                               validate, equal_values))
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
                               validate, equal_values)
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
                                  validate, equal_values)));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
                               validate, equal_values))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}

/* Return 1 iff any subexpression of X matches Y.
   Here we do not require that X or Y be valid (for registers referred to)
   for being in the hash table.  */

static int
refers_to_p (x, y)
     rtx x, y;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

 repeat:
  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  /* If X as a whole has the same code as Y, they may match.
     If so, return 1.  */
  if (code == GET_CODE (y))
    {
      if (exp_equiv_p (x, y, 0, 1))
        return 1;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (i == 0)
          {
            x = XEXP (x, 0);
            goto repeat;
          }
        else if (refers_to_p (XEXP (x, i), y))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (refers_to_p (XVECEXP (x, i, j), y))
            return 1;
      }

  return 0;
}

/* Given ADDR and SIZE (a memory address, and the size of the memory
   reference), set PBASE, PSTART, and PEND, which correspond to the base
   of the address, the starting offset, and the ending offset,
   respectively.

   ADDR is known to be a nonvarying address.  */

/* ??? Despite what the comments say, this function is in fact frequently
   passed varying addresses.  This does not appear to cause any problems.  */
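
/* Worked example (assumed): for ADDR = (plus (reg fp) (const_int 8))
   with no constant equivalence recorded for fp, and SIZE = 4, the loop
   below accumulates the offset and we return *PBASE = (reg fp),
   *PSTART = 8, *PEND = 12.  */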

static void
set_nonvarying_address_components (addr, size, pbase, pstart, pend)
     rtx addr;
     int size;
     rtx *pbase;
     HOST_WIDE_INT *pstart, *pend;
{
  rtx base;
  HOST_WIDE_INT start, end;

  base = addr;
  start = 0;
  end = 0;

  if (flag_pic && GET_CODE (base) == PLUS
      && XEXP (base, 0) == pic_offset_table_rtx)
    base = XEXP (base, 1);

  /* Registers with nonvarying addresses usually have constant equivalents;
     but the frame pointer register is also possible.  */
  if (GET_CODE (base) == REG
      && qty_const != 0
      && REGNO_QTY_VALID_P (REGNO (base))
      && qty_mode[REG_QTY (REGNO (base))] == GET_MODE (base)
      && qty_const[REG_QTY (REGNO (base))] != 0)
    base = qty_const[REG_QTY (REGNO (base))];
  else if (GET_CODE (base) == PLUS
           && GET_CODE (XEXP (base, 1)) == CONST_INT
           && GET_CODE (XEXP (base, 0)) == REG
           && qty_const != 0
           && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
           && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
               == GET_MODE (XEXP (base, 0)))
           && qty_const[REG_QTY (REGNO (XEXP (base, 0)))])
    {
      start = INTVAL (XEXP (base, 1));
      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];
    }
  /* This can happen as the result of virtual register instantiation,
     if the initial offset is too large to be a valid address.  */
  else if (GET_CODE (base) == PLUS
           && GET_CODE (XEXP (base, 0)) == REG
           && GET_CODE (XEXP (base, 1)) == REG
           && qty_const != 0
           && REGNO_QTY_VALID_P (REGNO (XEXP (base, 0)))
           && (qty_mode[REG_QTY (REGNO (XEXP (base, 0)))]
               == GET_MODE (XEXP (base, 0)))
           && qty_const[REG_QTY (REGNO (XEXP (base, 0)))]
           && REGNO_QTY_VALID_P (REGNO (XEXP (base, 1)))
           && (qty_mode[REG_QTY (REGNO (XEXP (base, 1)))]
               == GET_MODE (XEXP (base, 1)))
           && qty_const[REG_QTY (REGNO (XEXP (base, 1)))])
    {
      rtx tem = qty_const[REG_QTY (REGNO (XEXP (base, 1)))];
      base = qty_const[REG_QTY (REGNO (XEXP (base, 0)))];

      /* One of the two values must be a constant.  */
      if (GET_CODE (base) != CONST_INT)
        {
          if (GET_CODE (tem) != CONST_INT)
            abort ();
          start = INTVAL (tem);
        }
      else
        {
          start = INTVAL (base);
          base = tem;
        }
    }

  /* Handle everything that we can find inside an address that has been
     viewed as constant.  */

  while (1)
    {
      /* If no part of this switch does a "continue", the code outside
         will exit this loop.  */

      switch (GET_CODE (base))
        {
        case LO_SUM:
          /* By definition, operand1 of a LO_SUM is the associated constant
             address.  Use the associated constant address as the base
             instead.  */
          base = XEXP (base, 1);
          continue;

        case CONST:
          /* Strip off CONST.  */
          base = XEXP (base, 0);
          continue;

        case PLUS:
          if (GET_CODE (XEXP (base, 1)) == CONST_INT)
            {
              start += INTVAL (XEXP (base, 1));
              base = XEXP (base, 0);
              continue;
            }
          break;

        case AND:
          /* Handle the case of an AND which is the negative of a power of
             two.  This is used to represent unaligned memory operations.  */
          if (GET_CODE (XEXP (base, 1)) == CONST_INT
              && exact_log2 (- INTVAL (XEXP (base, 1))) > 0)
            {
              set_nonvarying_address_components (XEXP (base, 0), size,
                                                 pbase, pstart, pend);

              /* Assume the worst misalignment.  START is affected, but not
                 END, so compensate by adjusting SIZE.  Don't lose any
                 constant we already had.  */

              size = *pend - *pstart - INTVAL (XEXP (base, 1)) - 1;
              start += *pstart + INTVAL (XEXP (base, 1)) + 1;
              end += *pend;
              base = *pbase;
            }
          break;

        default:
          break;
        }

      break;
    }

  if (GET_CODE (base) == CONST_INT)
    {
      start += INTVAL (base);
      base = const0_rtx;
    }

  end = start + size;

  /* Set the return values.  */
  *pbase = base;
  *pstart = start;
  *pend = end;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.  */
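
/* For instance (illustrative): (plus (reg fp) (const_int -16)) does not
   vary when the register's quantity has a recorded constant, while a
   pointer freshly loaded from memory does vary, so only the former can
   safely be compared against recorded base/offset pairs.  */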

static int
cse_rtx_varies_p (x)
     register rtx x;
{
  /* We need not check for X and the equivalence class being of the same
     mode because if X is equivalent to a constant in some mode, it
     doesn't vary in any mode.  */

  if (GET_CODE (x) == REG
      && REGNO_QTY_VALID_P (REGNO (x))
      && GET_MODE (x) == qty_mode[REG_QTY (REGNO (x))]
      && qty_const[REG_QTY (REGNO (x))] != 0)
    return 0;

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && (GET_MODE (XEXP (x, 0))
          == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))])
    return 0;

  /* This can happen as the result of virtual register instantiation, if
     the initial constant is too large to be a valid address.  This gives
     us a three instruction sequence, load large offset into a register,
     load fp minus a constant into a register, then a MEM which is the
     sum of the two `constant' registers.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == REG
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
      && (GET_MODE (XEXP (x, 0))
          == qty_mode[REG_QTY (REGNO (XEXP (x, 0)))])
      && qty_const[REG_QTY (REGNO (XEXP (x, 0)))]
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1)))
      && (GET_MODE (XEXP (x, 1))
          == qty_mode[REG_QTY (REGNO (XEXP (x, 1)))])
      && qty_const[REG_QTY (REGNO (XEXP (x, 1)))])
    return 0;

  return rtx_varies_p (x);
}

/* Canonicalize an expression:
   replace each register reference inside it
   with the "oldest" equivalent register.

   If INSN is non-zero and we are replacing a pseudo with a hard register
   or vice versa, validate_change is used to ensure that INSN remains valid
   after we make our substitution.  The calls are made with IN_GROUP non-zero
   so apply_change_group must be called upon the outermost return from this
   function (unless INSN is zero).  The result of apply_change_group can
   generally be discarded since the changes we are making are optional.  */
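
/* Sketch of the expected calling pattern (assumed from the comment
   above, not a quotation of any particular caller):

       x = canon_reg (x, insn);    queues changes with IN_GROUP nonzero
       ...
       apply_change_group ();      commits or abandons them as a group  */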

static rtx
canon_reg (x, insn)
     rtx x;
     rtx insn;
{
  register int i;
  register enum rtx_code code;
  register char *fmt;

  if (x == 0)
    return x;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return x;

    case REG:
      {
        register int first;

        /* Never replace a hard reg, because hard regs can appear
           in more than one machine mode, and we must preserve the mode
           of each occurrence.  Also, some hard regs appear in
           MEMs that are shared and mustn't be altered.  Don't try to
           replace any reg that maps to a reg of class NO_REGS.  */
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
            || ! REGNO_QTY_VALID_P (REGNO (x)))
          return x;

        first = qty_first_reg[REG_QTY (REGNO (x))];
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
                : REGNO_REG_CLASS (first) == NO_REGS ? x
                : gen_rtx_REG (qty_mode[REG_QTY (REGNO (x))], first));
      }

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;

      if (fmt[i] == 'e')
        {
          rtx new = canon_reg (XEXP (x, i), insn);
          int insn_code;

          /* If replacing pseudo with hard reg or vice versa, ensure the
             insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
          if (insn != 0 && new != 0
              && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
              && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
                   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
                  || (insn_code = recog_memoized (insn)) < 0
                  || insn_n_dups[insn_code] > 0))
            validate_change (insn, &XEXP (x, i), new, 1);
          else
            XEXP (x, i) = new;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
    }

  return x;
}

/* LOC is a location within INSN that is an operand address (the contents of
   a MEM).  Find the best equivalent address to use that is valid for this
   insn.

   On most CISC machines, complicated address modes are costly, and rtx_cost
   is a good approximation for that cost.  However, most RISC machines have
   only a few (usually only one) memory reference formats.  If an address is
   valid at all, it is often just as cheap as any other address.  Hence, for
   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
   costs of various addresses.  For two addresses of equal cost, choose the
   one with the highest `rtx_cost' value as that has the potential of
   eliminating the most insns.  For equal costs, we choose the first in the
   equivalence class.  Note that we ignore the fact that pseudo registers are
   cheaper than hard registers here because we would also prefer the pseudo
   registers.  */
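
/* Illustrative contrast (not from the original sources): on a CISC
   target (mem (plus (reg) (const_int 4))) may simply be cheaper by
   rtx_cost, while on a RISC target that address and a plain (reg) can
   have equal ADDRESS_COST; we then prefer the higher-rtx_cost form,
   since replacing it can eliminate more insns.  */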

static void
find_best_addr (insn, loc)
     rtx insn;
     rtx *loc;
{
  struct table_elt *elt;
  rtx addr = *loc;
#ifdef ADDRESS_COST
  struct table_elt *p;
  int found_better = 1;
#endif
  int save_do_not_record = do_not_record;
  int save_hash_arg_in_memory = hash_arg_in_memory;
  int save_hash_arg_in_struct = hash_arg_in_struct;
  int addr_volatile;
  int regno;
  unsigned hash;

  /* Do not try to replace constant addresses or addresses of local and
     argument slots.  These MEM expressions are made only once and inserted
     in many instructions, as well as being used to control symbol table
     output.  It is not safe to clobber them.

     There are some uncommon cases where the address is already in a register
     for some reason, but we cannot take advantage of that because we have
     no easy way to unshare the MEM.  In addition, looking up all stack
     addresses is costly.  */
  if ((GET_CODE (addr) == PLUS
       && GET_CODE (XEXP (addr, 0)) == REG
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
       && (regno = REGNO (XEXP (addr, 0)),
           regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
           || regno == ARG_POINTER_REGNUM))
      || (GET_CODE (addr) == REG
          && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
              || regno == ARG_POINTER_REGNUM))
      || GET_CODE (addr) == ADDRESSOF
      || CONSTANT_ADDRESS_P (addr))
    return;

  /* If this address is not simply a register, try to fold it.  This will
     sometimes simplify the expression.  Many simplifications
     will not be valid, but some, usually applying the associative rule, will
     be valid and produce better code.  */
  if (GET_CODE (addr) != REG)
    {
      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);

      if (1
#ifdef ADDRESS_COST
          && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
              || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
                  && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
#else
          && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
#endif
          && validate_change (insn, loc, folded, 0))
        addr = folded;
    }

  /* If this address is not in the hash table, we can't look for equivalences
     of the whole address.  Also, ignore if volatile.  */

  do_not_record = 0;
  hash = HASH (addr, Pmode);
  addr_volatile = do_not_record;
  do_not_record = save_do_not_record;
  hash_arg_in_memory = save_hash_arg_in_memory;
  hash_arg_in_struct = save_hash_arg_in_struct;

  if (addr_volatile)
    return;

  elt = lookup (addr, hash, Pmode);

#ifndef ADDRESS_COST
  if (elt)
    {
      int our_cost = elt->cost;

      /* Find the lowest cost below ours that works.  */
      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
        if (elt->cost < our_cost
            && (GET_CODE (elt->exp) == REG
                || exp_equiv_p (elt->exp, elt->exp, 1, 0))
            && validate_change (insn, loc,
                                canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
          return;
    }
#else

  if (elt)
    {
      /* We need to find the best (under the criteria documented above) entry
         in the class that is valid.  We use the `flag' field to indicate
         choices that were invalid and iterate until we can't find a better
         one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
        p->flag = 0;

      while (found_better)
        {
          int best_addr_cost = CSE_ADDRESS_COST (*loc);
          int best_rtx_cost = (elt->cost + 1) >> 1;
          struct table_elt *best_elt = elt;

          found_better = 0;
          for (p = elt->first_same_value; p; p = p->next_same_value)
            if (! p->flag)
              {
                if ((GET_CODE (p->exp) == REG
                     || exp_equiv_p (p->exp, p->exp, 1, 0))
                    && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
                        || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
                            && (p->cost + 1) >> 1 > best_rtx_cost)))
                  {
                    found_better = 1;
                    best_addr_cost = CSE_ADDRESS_COST (p->exp);
                    best_rtx_cost = (p->cost + 1) >> 1;
                    best_elt = p;
                  }
              }

          if (found_better)
            {
              if (validate_change (insn, loc,
                                   canon_reg (copy_rtx (best_elt->exp),
                                              NULL_RTX), 0))
                return;
              else
                best_elt->flag = 1;
            }
        }
    }

  /* If the address is a binary operation with the first operand a register
     and the second a constant, do the same as above, but looking for
     equivalences of the register.  Then try to simplify before checking for
     the best address to use.  This catches a few cases:  First is when we
     have REG+const and the register is another REG+const.  We can often merge
     the constants and eliminate one insn and one register.  It may also be
     that a machine has a cheap REG+REG+const.  Finally, this improves the
     code on the Alpha for unaligned byte stores.  */

  if (flag_expensive_optimizations
      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
          || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
      && GET_CODE (XEXP (*loc, 0)) == REG
      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
    {
      rtx c = XEXP (*loc, 1);

      do_not_record = 0;
      hash = HASH (XEXP (*loc, 0), Pmode);
      do_not_record = save_do_not_record;
      hash_arg_in_memory = save_hash_arg_in_memory;
      hash_arg_in_struct = save_hash_arg_in_struct;

      elt = lookup (XEXP (*loc, 0), hash, Pmode);
      if (elt == 0)
        return;

      /* We need to find the best (under the criteria documented above) entry
         in the class that is valid.  We use the `flag' field to indicate
         choices that were invalid and iterate until we can't find a better
         one that hasn't already been tried.  */

      for (p = elt->first_same_value; p; p = p->next_same_value)
        p->flag = 0;

      while (found_better)
        {
          int best_addr_cost = CSE_ADDRESS_COST (*loc);
          int best_rtx_cost = (COST (*loc) + 1) >> 1;
          struct table_elt *best_elt = elt;
          rtx best_rtx = *loc;
          int count;

          /* In the worst case this is an O(n^2) algorithm, so limit our
             search to the first 32 elements on the list.  This avoids
             trouble compiling code with very long basic blocks that can
             easily call cse_gen_binary so many times that we run out of
             memory.  */

          found_better = 0;
          for (p = elt->first_same_value, count = 0;
               p && count < 32;
               p = p->next_same_value, count++)
            if (! p->flag
                && (GET_CODE (p->exp) == REG
                    || exp_equiv_p (p->exp, p->exp, 1, 0)))
              {
                rtx new = cse_gen_binary (GET_CODE (*loc), Pmode, p->exp, c);

                if (CSE_ADDRESS_COST (new) < best_addr_cost
                    || (CSE_ADDRESS_COST (new) == best_addr_cost
                        && (COST (new) + 1) >> 1 > best_rtx_cost))
                  {
                    found_better = 1;
                    best_addr_cost = CSE_ADDRESS_COST (new);
                    best_rtx_cost = (COST (new) + 1) >> 1;
                    best_elt = p;
                    best_rtx = new;
                  }
              }

          if (found_better)
            {
              if (validate_change (insn, loc,
                                   canon_reg (copy_rtx (best_rtx),
                                              NULL_RTX), 0))
                return;
              else
                best_elt->flag = 1;
            }
        }
    }
#endif
}

/* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
   find what values are actually being compared.

   *PARG1 and *PARG2 are updated to contain the rtx representing the values
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
   compared to produce cc0.

   The return value is the comparison operator: either CODE itself or the
   code corresponding to the inverse of the comparison.  */
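
/* Worked example (hypothetical): called with CODE == EQ, *PARG1 ==
   (reg 5) and *PARG2 == (const_int 0), where r5 is recorded as
   equivalent to (compare (reg 6) (reg 7)): we return EQ with *PARG1 =
   (reg 6) and *PARG2 = (reg 7), i.e. the test is rewritten to compare
   r6 with r7 directly.  */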

static enum rtx_code
find_comparison_args (code, parg1, parg2, pmode1, pmode2)
     enum rtx_code code;
     rtx *parg1, *parg2;
     enum machine_mode *pmode1, *pmode2;
{
  rtx arg1, arg2;

  arg1 = *parg1, arg2 = *parg2;

  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */

  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
    {
      /* Set non-zero when we find something of interest.  */
      rtx x = 0;
      int reverse_code = 0;
      struct table_elt *p = 0;

      /* If arg1 is a COMPARE, extract the comparison arguments from it.
         On machines with CC0, this is the only case that can occur, since
         fold_rtx will return the COMPARE or item being compared with zero
         when given CC0.  */

      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
        x = arg1;

      /* If ARG1 is a comparison operator and CODE is testing for
         STORE_FLAG_VALUE, get the inner arguments.  */

      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
        {
          if (code == NE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                  && code == LT && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
                  && FLOAT_STORE_FLAG_VALUE < 0)
#endif
              )
            x = arg1;
          else if (code == EQ
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
                       && code == GE && STORE_FLAG_VALUE == -1)
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
                       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
                   )
            x = arg1, reverse_code = 1;
        }

      /* ??? We could also check for

         (ne (and (eq (...) (const_int 1))) (const_int 0))

         and related forms, but let's wait until we see them occurring.  */

      if (x == 0)
        /* Look up ARG1 in the hash table and see if it has an equivalence
           that lets us see what is being compared.  */
        p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) % NBUCKETS,
                    GET_MODE (arg1));
      if (p)
        p = p->first_same_value;

      for (; p; p = p->next_same_value)
        {
          enum machine_mode inner_mode = GET_MODE (p->exp);

          /* If the entry isn't valid, skip it.  */
          if (! exp_equiv_p (p->exp, p->exp, 1, 0))
            continue;

          if (GET_CODE (p->exp) == COMPARE
              /* Another possibility is that this machine has a compare insn
                 that includes the comparison code.  In that case, ARG1 would
                 be equivalent to a comparison operation that would set ARG1
                 to either STORE_FLAG_VALUE or zero.  If this is an NE
                 operation, CODE is the actual comparison being done; if it
                 is an EQ, we must reverse CODE.  On machines with a negative
                 value for STORE_FLAG_VALUE, also look at LT and GE
                 operations.  */
              || ((code == NE
                   || (code == LT
                       && GET_MODE_CLASS (inner_mode) == MODE_INT
                       && (GET_MODE_BITSIZE (inner_mode)
                           <= HOST_BITS_PER_WIDE_INT)
                       && (STORE_FLAG_VALUE
                           & ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                   || (code == LT
                       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                       && FLOAT_STORE_FLAG_VALUE < 0)
#endif
                   )
                  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
            {
              x = p->exp;
              break;
            }
          else if ((code == EQ
                    || (code == GE
                        && GET_MODE_CLASS (inner_mode) == MODE_INT
                        && (GET_MODE_BITSIZE (inner_mode)
                            <= HOST_BITS_PER_WIDE_INT)
                        && (STORE_FLAG_VALUE
                            & ((HOST_WIDE_INT) 1
                               << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                    || (code == GE
                        && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                        && FLOAT_STORE_FLAG_VALUE < 0)
#endif
                    )
                   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
            {
              reverse_code = 1;
              x = p->exp;
              break;
            }

          /* If this is fp + constant, the equivalent is a better operand since
             it may let us predict the value of the comparison.  */
          else if (NONZERO_BASE_PLUS_P (p->exp))
            {
              arg1 = p->exp;
              continue;
            }
        }

      /* If we didn't find a useful equivalence for ARG1, we are done.
         Otherwise, set up for the next iteration.  */
      if (x == 0)
        break;

      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
      if (GET_RTX_CLASS (GET_CODE (x)) == '<')
        code = GET_CODE (x);

      if (reverse_code)
        code = reverse_condition (code);
    }

  /* Return our results.  Return the modes from before fold_rtx
     because fold_rtx might produce const_int, and then it's too late.  */
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);

  return code;
}

/* Try to simplify a unary operation CODE whose output mode is to be
   MODE with input operand OP whose mode was originally OP_MODE.
   Return zero if no simplification can be made.  */
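
/* For instance (illustrative): simplify_unary_operation (NEG, SImode,
   GEN_INT (5), SImode) folds to (const_int -5), and ZERO_EXTEND of
   (const_int -1) from QImode to SImode folds to (const_int 255).  */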

rtx
simplify_unary_operation (code, mode, op, op_mode)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op;
     enum machine_mode op_mode;
{
  register int width = GET_MODE_BITSIZE (mode);

  /* The order of these tests is critical so that, for example, we don't
     check the wrong mode (input vs. output) for a conversion operation,
     such as FIX.  At some point, this should be simplified.  */

#if !defined(REAL_IS_NOT_DOUBLE) || defined(REAL_ARITHMETIC)

  if (code == FLOAT && GET_MODE (op) == VOIDmode
      && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
        lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
        lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_INT (d, lv, hv, mode);
#else
      if (hv < 0)
        {
          d = (double) (~ hv);
          d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
                * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
          d += (double) (unsigned HOST_WIDE_INT) (~ lv);
          d = (- d - 1.0);
        }
      else
        {
          d = (double) hv;
          d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
                * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
          d += (double) (unsigned HOST_WIDE_INT) lv;
        }
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
  else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
           && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
    {
      HOST_WIDE_INT hv, lv;
      REAL_VALUE_TYPE d;

      if (GET_CODE (op) == CONST_INT)
        lv = INTVAL (op), hv = INTVAL (op) < 0 ? -1 : 0;
      else
        lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);

      if (op_mode == VOIDmode)
        {
          /* We don't know how to interpret negative-looking numbers in
             this case, so don't try to fold those.  */
          if (hv < 0)
            return 0;
        }
      else if (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT * 2)
        ;
      else
        hv = 0, lv &= GET_MODE_MASK (op_mode);

#ifdef REAL_ARITHMETIC
      REAL_VALUE_FROM_UNSIGNED_INT (d, lv, hv, mode);
#else

      d = (double) (unsigned HOST_WIDE_INT) hv;
      d *= ((double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2))
            * (double) ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)));
      d += (double) (unsigned HOST_WIDE_INT) lv;
#endif  /* REAL_ARITHMETIC */
      d = real_value_truncate (mode, d);
      return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
    }
#endif

  if (GET_CODE (op) == CONST_INT
      && width <= HOST_BITS_PER_WIDE_INT && width > 0)
    {
      register HOST_WIDE_INT arg0 = INTVAL (op);
      register HOST_WIDE_INT val;

      switch (code)
        {
        case NOT:
          val = ~ arg0;
          break;

        case NEG:
          val = - arg0;
          break;

        case ABS:
          val = (arg0 >= 0 ? arg0 : - arg0);
          break;

        case FFS:
          /* Don't use ffs here.  Instead, get low order bit and then its
             number.  If arg0 is zero, this will return 0, as desired.  */
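          /* Worked example (illustrative): arg0 == 12 (binary 1100) gives
             arg0 & -arg0 == 4, exact_log2 (4) == 2, so val == 3, the
             1-based position of the lowest set bit; arg0 == 0 gives
             exact_log2 (0) == -1 and val == 0.  */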
3313	  arg0 &= GET_MODE_MASK (mode);
3314	  val = exact_log2 (arg0 & (- arg0)) + 1;
3315	  break;
3316
3317	case TRUNCATE:
3318	  val = arg0;
3319	  break;
3320
3321	case ZERO_EXTEND:
3322	  if (op_mode == VOIDmode)
3323	    op_mode = mode;
3324	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3325	    {
3326	      /* If we were really extending the mode,
3327		 we would have to distinguish between zero-extension
3328		 and sign-extension.  */
3329	      if (width != GET_MODE_BITSIZE (op_mode))
3330		abort ();
3331	      val = arg0;
3332	    }
3333	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3334	    val = arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3335	  else
3336	    return 0;
3337	  break;
3338
3339	case SIGN_EXTEND:
3340	  if (op_mode == VOIDmode)
3341	    op_mode = mode;
3342	  if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT)
3343	    {
3344	      /* If we were really extending the mode,
3345		 we would have to distinguish between zero-extension
3346		 and sign-extension.  */
3347	      if (width != GET_MODE_BITSIZE (op_mode))
3348		abort ();
3349	      val = arg0;
3350	    }
3351	  else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT)
3352	    {
3353	      val
3354		= arg0 & ~((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (op_mode));
3355	      if (val
3356		  & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (op_mode) - 1)))
3357		val -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3358	    }
3359	  else
3360	    return 0;
3361	  break;
3362
3363	case SQRT:
3364	  return 0;
3365
3366	default:
3367	  abort ();
3368	}
3369
3370      /* Clear the bits that don't belong in our mode,
3371	 unless they and our sign bit are all one.
3372	 So we get either a reasonable negative value or a reasonable
3373	 unsigned value for this mode.  */
3374      if (width < HOST_BITS_PER_WIDE_INT
3375	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3376	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3377	val &= ((HOST_WIDE_INT) 1 << width) - 1;
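      /* For example, with width == 8, val == -1 passes the test above
	 (all bits from the sign bit up are one) and stays -1, while
	 val == 0x132 fails it and is masked down to 0x32.  */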
3378
3379      /* If this would be an entire word for the target, but is not for
3380	 the host, then sign-extend on the host so that the number will look
3381	 the same way on the host that it would on the target.
3382
3383	 For example, when building a 64 bit alpha hosted 32 bit sparc
3384	 targeted compiler, then we want the 32 bit unsigned value -1 to be
3385	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */
3387
3388      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3389	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3390	val |= ((HOST_WIDE_INT) (-1) << width);
3391
3392      return GEN_INT (val);
3393    }
3394
3395  /* We can do some operations on integer CONST_DOUBLEs.  Also allow
3396     for a DImode operation on a CONST_INT.  */
  else if (GET_MODE (op) == VOIDmode && width <= HOST_BITS_PER_WIDE_INT * 2
3398	   && (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
3399    {
3400      HOST_WIDE_INT l1, h1, lv, hv;
3401
3402      if (GET_CODE (op) == CONST_DOUBLE)
3403	l1 = CONST_DOUBLE_LOW (op), h1 = CONST_DOUBLE_HIGH (op);
3404      else
3405	l1 = INTVAL (op), h1 = l1 < 0 ? -1 : 0;
3406
3407      switch (code)
3408	{
3409	case NOT:
3410	  lv = ~ l1;
3411	  hv = ~ h1;
3412	  break;
3413
3414	case NEG:
3415	  neg_double (l1, h1, &lv, &hv);
3416	  break;
3417
3418	case ABS:
3419	  if (h1 < 0)
3420	    neg_double (l1, h1, &lv, &hv);
3421	  else
3422	    lv = l1, hv = h1;
3423	  break;
3424
	case FFS:
	  hv = 0;
	  if (l1 != 0)
	    lv = exact_log2 (l1 & (-l1)) + 1;
	  else if (h1 != 0)
	    lv = HOST_BITS_PER_WIDE_INT + exact_log2 (h1 & (-h1)) + 1;
	  else
	    /* FFS of zero is zero.  */
	    lv = 0;
	  break;
3432
3433	case TRUNCATE:
3434	  /* This is just a change-of-mode, so do nothing.  */
3435	  lv = l1, hv = h1;
3436	  break;
3437
3438	case ZERO_EXTEND:
3439	  if (op_mode == VOIDmode
3440	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3441	    return 0;
3442
3443	  hv = 0;
3444	  lv = l1 & GET_MODE_MASK (op_mode);
3445	  break;
3446
3447	case SIGN_EXTEND:
3448	  if (op_mode == VOIDmode
3449	      || GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT)
3450	    return 0;
3451	  else
3452	    {
3453	      lv = l1 & GET_MODE_MASK (op_mode);
3454	      if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
3455		  && (lv & ((HOST_WIDE_INT) 1
3456			    << (GET_MODE_BITSIZE (op_mode) - 1))) != 0)
3457		lv -= (HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (op_mode);
3458
3459	      hv = (lv < 0) ? ~ (HOST_WIDE_INT) 0 : 0;
3460	    }
3461	  break;
3462
3463	case SQRT:
3464	  return 0;
3465
3466	default:
3467	  return 0;
3468	}
3469
3470      return immed_double_const (lv, hv, mode);
3471    }
3472
3473#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3474  else if (GET_CODE (op) == CONST_DOUBLE
3475	   && GET_MODE_CLASS (mode) == MODE_FLOAT)
3476    {
3477      REAL_VALUE_TYPE d;
3478      jmp_buf handler;
3479      rtx x;
3480
3481      if (setjmp (handler))
3482	/* There used to be a warning here, but that is inadvisable.
3483	   People may want to cause traps, and the natural way
3484	   to do it should not get a warning.  */
3485	return 0;
3486
3487      set_float_handler (handler);
3488
3489      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3490
3491      switch (code)
3492	{
3493	case NEG:
3494	  d = REAL_VALUE_NEGATE (d);
3495	  break;
3496
3497	case ABS:
3498	  if (REAL_VALUE_NEGATIVE (d))
3499	    d = REAL_VALUE_NEGATE (d);
3500	  break;
3501
3502	case FLOAT_TRUNCATE:
3503	  d = real_value_truncate (mode, d);
3504	  break;
3505
3506	case FLOAT_EXTEND:
3507	  /* All this does is change the mode.  */
3508	  break;
3509
3510	case FIX:
3511	  d = REAL_VALUE_RNDZINT (d);
3512	  break;
3513
3514	case UNSIGNED_FIX:
3515	  d = REAL_VALUE_UNSIGNED_RNDZINT (d);
3516	  break;
3517
3518	case SQRT:
3519	  return 0;
3520
3521	default:
3522	  abort ();
3523	}
3524
3525      x = CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
3526      set_float_handler (NULL_PTR);
3527      return x;
3528    }
3529
3530  else if (GET_CODE (op) == CONST_DOUBLE
3531	   && GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
3532	   && GET_MODE_CLASS (mode) == MODE_INT
3533	   && width <= HOST_BITS_PER_WIDE_INT && width > 0)
3534    {
3535      REAL_VALUE_TYPE d;
3536      jmp_buf handler;
3537      HOST_WIDE_INT val;
3538
3539      if (setjmp (handler))
3540	return 0;
3541
3542      set_float_handler (handler);
3543
3544      REAL_VALUE_FROM_CONST_DOUBLE (d, op);
3545
3546      switch (code)
3547	{
3548	case FIX:
3549	  val = REAL_VALUE_FIX (d);
3550	  break;
3551
3552	case UNSIGNED_FIX:
3553	  val = REAL_VALUE_UNSIGNED_FIX (d);
3554	  break;
3555
3556	default:
3557	  abort ();
3558	}
3559
3560      set_float_handler (NULL_PTR);
3561
3562      /* Clear the bits that don't belong in our mode,
3563	 unless they and our sign bit are all one.
3564	 So we get either a reasonable negative value or a reasonable
3565	 unsigned value for this mode.  */
3566      if (width < HOST_BITS_PER_WIDE_INT
3567	  && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
3568	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
3569	val &= ((HOST_WIDE_INT) 1 << width) - 1;
3570
3571      /* If this would be an entire word for the target, but is not for
3572	 the host, then sign-extend on the host so that the number will look
3573	 the same way on the host that it would on the target.
3574
3575	 For example, when building a 64 bit alpha hosted 32 bit sparc
3576	 targeted compiler, then we want the 32 bit unsigned value -1 to be
3577	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */
3579
3580      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
3581	  && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
3582	val |= ((HOST_WIDE_INT) (-1) << width);
3583
3584      return GEN_INT (val);
3585    }
3586#endif
3587  /* This was formerly used only for non-IEEE float.
3588     eggert@twinsun.com says it is safe for IEEE also.  */
3589  else
3590    {
3591      /* There are some simplifications we can do even if the operands
3592	 aren't constant.  */
3593      switch (code)
3594	{
3595	case NEG:
3596	case NOT:
3597	  /* (not (not X)) == X, similarly for NEG.  */
3598	  if (GET_CODE (op) == code)
3599	    return XEXP (op, 0);
3600	  break;
3601
3602	case SIGN_EXTEND:
3603	  /* (sign_extend (truncate (minus (label_ref L1) (label_ref L2))))
3604	     becomes just the MINUS if its mode is MODE.  This allows
3605	     folding switch statements on machines using casesi (such as
3606	     the Vax).  */
3607	  if (GET_CODE (op) == TRUNCATE
3608	      && GET_MODE (XEXP (op, 0)) == mode
3609	      && GET_CODE (XEXP (op, 0)) == MINUS
3610	      && GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF
3611	      && GET_CODE (XEXP (XEXP (op, 0), 1)) == LABEL_REF)
3612	    return XEXP (op, 0);
3613
3614#ifdef POINTERS_EXTEND_UNSIGNED
3615	  if (! POINTERS_EXTEND_UNSIGNED
3616	      && mode == Pmode && GET_MODE (op) == ptr_mode
3617	      && CONSTANT_P (op))
3618	    return convert_memory_address (Pmode, op);
3619#endif
3620	  break;
3621
3622#ifdef POINTERS_EXTEND_UNSIGNED
3623	case ZERO_EXTEND:
3624	  if (POINTERS_EXTEND_UNSIGNED
3625	      && mode == Pmode && GET_MODE (op) == ptr_mode
3626	      && CONSTANT_P (op))
3627	    return convert_memory_address (Pmode, op);
3628	  break;
3629#endif
3630
3631	default:
3632	  break;
3633	}
3634
3635      return 0;
3636    }
3637}
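
/* An illustrative sketch, not part of the pass; the function name is
   invented and the block is kept under #if 0 so it does not affect the
   build.  It shows how a caller might use simplify_unary_operation:
   (neg:SI (const_int 5)) folds to (const_int -5), and when no folding
   is possible the caller builds the NEG itself.  */
#if 0
static rtx
example_fold_neg (op)
     rtx op;
{
  rtx folded = simplify_unary_operation (NEG, SImode, op, SImode);

  return folded ? folded : gen_rtx_NEG (SImode, op);
}
#endif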
3638
3639/* Simplify a binary operation CODE with result mode MODE, operating on OP0
3640   and OP1.  Return 0 if no simplification is possible.
3641
3642   Don't use this for relational operations such as EQ or LT.
3643   Use simplify_relational_operation instead.  */
3644
3645rtx
3646simplify_binary_operation (code, mode, op0, op1)
3647     enum rtx_code code;
3648     enum machine_mode mode;
3649     rtx op0, op1;
3650{
3651  register HOST_WIDE_INT arg0, arg1, arg0s, arg1s;
3652  HOST_WIDE_INT val;
3653  int width = GET_MODE_BITSIZE (mode);
3654  rtx tem;
3655
3656  /* Relational operations don't work here.  We must know the mode
3657     of the operands in order to do the comparison correctly.
3658     Assuming a full word can give incorrect results.
3659     Consider comparing 128 with -128 in QImode.  */
3660
3661  if (GET_RTX_CLASS (code) == '<')
3662    abort ();
3663
3664#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3665  if (GET_MODE_CLASS (mode) == MODE_FLOAT
3666      && GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
3667      && mode == GET_MODE (op0) && mode == GET_MODE (op1))
3668    {
3669      REAL_VALUE_TYPE f0, f1, value;
3670      jmp_buf handler;
3671
3672      if (setjmp (handler))
3673	return 0;
3674
3675      set_float_handler (handler);
3676
3677      REAL_VALUE_FROM_CONST_DOUBLE (f0, op0);
3678      REAL_VALUE_FROM_CONST_DOUBLE (f1, op1);
3679      f0 = real_value_truncate (mode, f0);
3680      f1 = real_value_truncate (mode, f1);
3681
3682#ifdef REAL_ARITHMETIC
3683#ifndef REAL_INFINITY
3684      if (code == DIV && REAL_VALUES_EQUAL (f1, dconst0))
3685	return 0;
3686#endif
3687      REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1);
3688#else
3689      switch (code)
3690	{
3691	case PLUS:
3692	  value = f0 + f1;
3693	  break;
3694	case MINUS:
3695	  value = f0 - f1;
3696	  break;
3697	case MULT:
3698	  value = f0 * f1;
3699	  break;
3700	case DIV:
3701#ifndef REAL_INFINITY
3702	  if (f1 == 0)
3703	    return 0;
3704#endif
3705	  value = f0 / f1;
3706	  break;
3707	case SMIN:
3708	  value = MIN (f0, f1);
3709	  break;
3710	case SMAX:
3711	  value = MAX (f0, f1);
3712	  break;
3713	default:
3714	  abort ();
3715	}
3716#endif
3717
3718      value = real_value_truncate (mode, value);
3719      set_float_handler (NULL_PTR);
3720      return CONST_DOUBLE_FROM_REAL_VALUE (value, mode);
3721    }
3722#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3723
3724  /* We can fold some multi-word operations.  */
3725  if (GET_MODE_CLASS (mode) == MODE_INT
3726      && width == HOST_BITS_PER_WIDE_INT * 2
3727      && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
3728      && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
3729    {
3730      HOST_WIDE_INT l1, l2, h1, h2, lv, hv;
3731
3732      if (GET_CODE (op0) == CONST_DOUBLE)
3733	l1 = CONST_DOUBLE_LOW (op0), h1 = CONST_DOUBLE_HIGH (op0);
3734      else
3735	l1 = INTVAL (op0), h1 = l1 < 0 ? -1 : 0;
3736
3737      if (GET_CODE (op1) == CONST_DOUBLE)
3738	l2 = CONST_DOUBLE_LOW (op1), h2 = CONST_DOUBLE_HIGH (op1);
3739      else
3740	l2 = INTVAL (op1), h2 = l2 < 0 ? -1 : 0;
3741
3742      switch (code)
3743	{
3744	case MINUS:
3745	  /* A - B == A + (-B).  */
3746	  neg_double (l2, h2, &lv, &hv);
3747	  l2 = lv, h2 = hv;
3748
	  /* ... fall through ...  */
3750
3751	case PLUS:
3752	  add_double (l1, h1, l2, h2, &lv, &hv);
3753	  break;
3754
3755	case MULT:
3756	  mul_double (l1, h1, l2, h2, &lv, &hv);
3757	  break;
3758
3759	case DIV:  case MOD:   case UDIV:  case UMOD:
3760	  /* We'd need to include tree.h to do this and it doesn't seem worth
3761	     it.  */
3762	  return 0;
3763
3764	case AND:
3765	  lv = l1 & l2, hv = h1 & h2;
3766	  break;
3767
3768	case IOR:
3769	  lv = l1 | l2, hv = h1 | h2;
3770	  break;
3771
3772	case XOR:
3773	  lv = l1 ^ l2, hv = h1 ^ h2;
3774	  break;
3775
3776	case SMIN:
3777	  if (h1 < h2
3778	      || (h1 == h2
3779		  && ((unsigned HOST_WIDE_INT) l1
3780		      < (unsigned HOST_WIDE_INT) l2)))
3781	    lv = l1, hv = h1;
3782	  else
3783	    lv = l2, hv = h2;
3784	  break;
3785
3786	case SMAX:
3787	  if (h1 > h2
3788	      || (h1 == h2
3789		  && ((unsigned HOST_WIDE_INT) l1
3790		      > (unsigned HOST_WIDE_INT) l2)))
3791	    lv = l1, hv = h1;
3792	  else
3793	    lv = l2, hv = h2;
3794	  break;
3795
3796	case UMIN:
3797	  if ((unsigned HOST_WIDE_INT) h1 < (unsigned HOST_WIDE_INT) h2
3798	      || (h1 == h2
3799		  && ((unsigned HOST_WIDE_INT) l1
3800		      < (unsigned HOST_WIDE_INT) l2)))
3801	    lv = l1, hv = h1;
3802	  else
3803	    lv = l2, hv = h2;
3804	  break;
3805
3806	case UMAX:
3807	  if ((unsigned HOST_WIDE_INT) h1 > (unsigned HOST_WIDE_INT) h2
3808	      || (h1 == h2
3809		  && ((unsigned HOST_WIDE_INT) l1
3810		      > (unsigned HOST_WIDE_INT) l2)))
3811	    lv = l1, hv = h1;
3812	  else
3813	    lv = l2, hv = h2;
3814	  break;
3815
3816	case LSHIFTRT:   case ASHIFTRT:
3817	case ASHIFT:
3818	case ROTATE:     case ROTATERT:
3819#ifdef SHIFT_COUNT_TRUNCATED
3820	  if (SHIFT_COUNT_TRUNCATED)
3821	    l2 &= (GET_MODE_BITSIZE (mode) - 1), h2 = 0;
3822#endif
3823
3824	  if (h2 != 0 || l2 < 0 || l2 >= GET_MODE_BITSIZE (mode))
3825	    return 0;
3826
3827	  if (code == LSHIFTRT || code == ASHIFTRT)
3828	    rshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv,
3829			   code == ASHIFTRT);
3830	  else if (code == ASHIFT)
3831	    lshift_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv, 1);
3832	  else if (code == ROTATE)
3833	    lrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3834	  else /* code == ROTATERT */
3835	    rrotate_double (l1, h1, l2, GET_MODE_BITSIZE (mode), &lv, &hv);
3836	  break;
3837
3838	default:
3839	  return 0;
3840	}
3841
3842      return immed_double_const (lv, hv, mode);
3843    }
3844
3845  if (GET_CODE (op0) != CONST_INT || GET_CODE (op1) != CONST_INT
3846      || width > HOST_BITS_PER_WIDE_INT || width == 0)
3847    {
3848      /* Even if we can't compute a constant result,
3849	 there are some cases worth simplifying.  */
3850
3851      switch (code)
3852	{
3853	case PLUS:
3854	  /* In IEEE floating point, x+0 is not the same as x.  Similarly
3855	     for the other optimizations below.  */
3856	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3857	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3858	    break;
3859
3860	  if (op1 == CONST0_RTX (mode))
3861	    return op0;
3862
3863	  /* ((-a) + b) -> (b - a) and similarly for (a + (-b)) */
3864	  if (GET_CODE (op0) == NEG)
3865	    return cse_gen_binary (MINUS, mode, op1, XEXP (op0, 0));
3866	  else if (GET_CODE (op1) == NEG)
3867	    return cse_gen_binary (MINUS, mode, op0, XEXP (op1, 0));
3868
3869	  /* Handle both-operands-constant cases.  We can only add
3870	     CONST_INTs to constants since the sum of relocatable symbols
3871	     can't be handled by most assemblers.  Don't add CONST_INT
3872	     to CONST_INT since overflow won't be computed properly if wider
3873	     than HOST_BITS_PER_WIDE_INT.  */
3874
3875	  if (CONSTANT_P (op0) && GET_MODE (op0) != VOIDmode
3876	      && GET_CODE (op1) == CONST_INT)
3877	    return plus_constant (op0, INTVAL (op1));
3878	  else if (CONSTANT_P (op1) && GET_MODE (op1) != VOIDmode
3879		   && GET_CODE (op0) == CONST_INT)
3880	    return plus_constant (op1, INTVAL (op0));
3881
3882	  /* See if this is something like X * C - X or vice versa or
3883	     if the multiplication is written as a shift.  If so, we can
3884	     distribute and make a new multiply, shift, or maybe just
	     have X (if C is 2 in the example above).  But don't make a
	     real multiply if we didn't have one before.  */
3887
3888	  if (! FLOAT_MODE_P (mode))
3889	    {
3890	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
3891	      rtx lhs = op0, rhs = op1;
3892	      int had_mult = 0;
3893
3894	      if (GET_CODE (lhs) == NEG)
3895		coeff0 = -1, lhs = XEXP (lhs, 0);
3896	      else if (GET_CODE (lhs) == MULT
3897		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
3898		{
3899		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
3900		  had_mult = 1;
3901		}
3902	      else if (GET_CODE (lhs) == ASHIFT
3903		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
3904		       && INTVAL (XEXP (lhs, 1)) >= 0
3905		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
3906		{
3907		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
3908		  lhs = XEXP (lhs, 0);
3909		}
3910
3911	      if (GET_CODE (rhs) == NEG)
3912		coeff1 = -1, rhs = XEXP (rhs, 0);
3913	      else if (GET_CODE (rhs) == MULT
3914		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
3915		{
3916		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
3917		  had_mult = 1;
3918		}
3919	      else if (GET_CODE (rhs) == ASHIFT
3920		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
3921		       && INTVAL (XEXP (rhs, 1)) >= 0
3922		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
3923		{
3924		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
3925		  rhs = XEXP (rhs, 0);
3926		}
3927
3928	      if (rtx_equal_p (lhs, rhs))
3929		{
3930		  tem = cse_gen_binary (MULT, mode, lhs,
3931					GEN_INT (coeff0 + coeff1));
3932		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
3933		}
3934	    }
3935
3936	  /* If one of the operands is a PLUS or a MINUS, see if we can
3937	     simplify this by the associative law.
3938	     Don't use the associative law for floating point.
3939	     The inaccuracy makes it nonassociative,
3940	     and subtle programs can break if operations are associated.  */
3941
3942	  if (INTEGRAL_MODE_P (mode)
3943	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
3944		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
3945	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
3946	    return tem;
3947	  break;
3948
3949	case COMPARE:
3950#ifdef HAVE_cc0
3951	  /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3952	     using cc0, in which case we want to leave it as a COMPARE
3953	     so we can distinguish it from a register-register-copy.
3954
3955	     In IEEE floating point, x-0 is not the same as x.  */
3956
3957	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3958	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
3959	      && op1 == CONST0_RTX (mode))
3960	    return op0;
3961#else
3962	  /* Do nothing here.  */
3963#endif
3964	  break;
3965
3966	case MINUS:
3967	  /* None of these optimizations can be done for IEEE
3968	     floating point.  */
3969	  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
3970	      && FLOAT_MODE_P (mode) && ! flag_fast_math)
3971	    break;
3972
3973	  /* We can't assume x-x is 0 even with non-IEEE floating point,
3974	     but since it is zero except in very strange circumstances, we
3975	     will treat it as zero with -ffast-math.  */
3976	  if (rtx_equal_p (op0, op1)
3977	      && ! side_effects_p (op0)
3978	      && (! FLOAT_MODE_P (mode) || flag_fast_math))
3979	    return CONST0_RTX (mode);
3980
3981	  /* Change subtraction from zero into negation.  */
3982	  if (op0 == CONST0_RTX (mode))
3983	    return gen_rtx_NEG (mode, op1);
3984
3985	  /* (-1 - a) is ~a.  */
3986	  if (op0 == constm1_rtx)
3987	    return gen_rtx_NOT (mode, op1);
3988
3989	  /* Subtracting 0 has no effect.  */
3990	  if (op1 == CONST0_RTX (mode))
3991	    return op0;
3992
3993	  /* See if this is something like X * C - X or vice versa or
3994	     if the multiplication is written as a shift.  If so, we can
3995	     distribute and make a new multiply, shift, or maybe just
	     have X (if C is 2 in the example above).  But don't make a
	     real multiply if we didn't have one before.  */
3998
3999	  if (! FLOAT_MODE_P (mode))
4000	    {
4001	      HOST_WIDE_INT coeff0 = 1, coeff1 = 1;
4002	      rtx lhs = op0, rhs = op1;
4003	      int had_mult = 0;
4004
4005	      if (GET_CODE (lhs) == NEG)
4006		coeff0 = -1, lhs = XEXP (lhs, 0);
4007	      else if (GET_CODE (lhs) == MULT
4008		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT)
4009		{
4010		  coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0);
4011		  had_mult = 1;
4012		}
4013	      else if (GET_CODE (lhs) == ASHIFT
4014		       && GET_CODE (XEXP (lhs, 1)) == CONST_INT
4015		       && INTVAL (XEXP (lhs, 1)) >= 0
4016		       && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
4017		{
4018		  coeff0 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (lhs, 1));
4019		  lhs = XEXP (lhs, 0);
4020		}
4021
4022	      if (GET_CODE (rhs) == NEG)
		coeff1 = -1, rhs = XEXP (rhs, 0);
4024	      else if (GET_CODE (rhs) == MULT
4025		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT)
4026		{
4027		  coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0);
4028		  had_mult = 1;
4029		}
4030	      else if (GET_CODE (rhs) == ASHIFT
4031		       && GET_CODE (XEXP (rhs, 1)) == CONST_INT
4032		       && INTVAL (XEXP (rhs, 1)) >= 0
4033		       && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
4034		{
4035		  coeff1 = ((HOST_WIDE_INT) 1) << INTVAL (XEXP (rhs, 1));
4036		  rhs = XEXP (rhs, 0);
4037		}
4038
4039	      if (rtx_equal_p (lhs, rhs))
4040		{
4041		  tem = cse_gen_binary (MULT, mode, lhs,
4042					GEN_INT (coeff0 - coeff1));
4043		  return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem;
4044		}
4045	    }
4046
4047	  /* (a - (-b)) -> (a + b).  */
4048	  if (GET_CODE (op1) == NEG)
4049	    return cse_gen_binary (PLUS, mode, op0, XEXP (op1, 0));
4050
4051	  /* If one of the operands is a PLUS or a MINUS, see if we can
4052	     simplify this by the associative law.
4053	     Don't use the associative law for floating point.
4054	     The inaccuracy makes it nonassociative,
4055	     and subtle programs can break if operations are associated.  */
4056
4057	  if (INTEGRAL_MODE_P (mode)
4058	      && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
4059		  || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS)
4060	      && (tem = simplify_plus_minus (code, mode, op0, op1)) != 0)
4061	    return tem;
4062
4063	  /* Don't let a relocatable value get a negative coeff.  */
4064	  if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
4065	    return plus_constant (op0, - INTVAL (op1));
4066
4067	  /* (x - (x & y)) -> (x & ~y) */
	  if (GET_CODE (op1) == AND)
	    {
	      if (rtx_equal_p (op0, XEXP (op1, 0)))
		return cse_gen_binary (AND, mode, op0,
				       gen_rtx_NOT (mode, XEXP (op1, 1)));
	      if (rtx_equal_p (op0, XEXP (op1, 1)))
		return cse_gen_binary (AND, mode, op0,
				       gen_rtx_NOT (mode, XEXP (op1, 0)));
	    }
4075	  break;
4076
4077	case MULT:
4078	  if (op1 == constm1_rtx)
4079	    {
4080	      tem = simplify_unary_operation (NEG, mode, op0, mode);
4081
4082	      return tem ? tem : gen_rtx_NEG (mode, op0);
4083	    }
4084
4085	  /* In IEEE floating point, x*0 is not always 0.  */
4086	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4087	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4088	      && op1 == CONST0_RTX (mode)
4089	      && ! side_effects_p (op0))
4090	    return op1;
4091
4092	  /* In IEEE floating point, x*1 is not equivalent to x for nans.
4093	     However, ANSI says we can drop signals,
4094	     so we can do this anyway.  */
4095	  if (op1 == CONST1_RTX (mode))
4096	    return op0;
4097
4098	  /* Convert multiply by constant power of two into shift unless
4099	     we are still generating RTL.  This test is a kludge.  */
4100	  if (GET_CODE (op1) == CONST_INT
4101	      && (val = exact_log2 (INTVAL (op1))) >= 0
4102	      /* If the mode is larger than the host word size, and the
4103		 uppermost bit is set, then this isn't a power of two due
4104		 to implicit sign extension.  */
4105	      && (width <= HOST_BITS_PER_WIDE_INT
4106		  || val != HOST_BITS_PER_WIDE_INT - 1)
4107	      && ! rtx_equal_function_value_matters)
4108	    return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
4109
4110	  if (GET_CODE (op1) == CONST_DOUBLE
4111	      && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
4112	    {
4113	      REAL_VALUE_TYPE d;
4114	      jmp_buf handler;
4115	      int op1is2, op1ism1;
4116
4117	      if (setjmp (handler))
4118		return 0;
4119
4120	      set_float_handler (handler);
4121	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4122	      op1is2 = REAL_VALUES_EQUAL (d, dconst2);
4123	      op1ism1 = REAL_VALUES_EQUAL (d, dconstm1);
4124	      set_float_handler (NULL_PTR);
4125
4126	      /* x*2 is x+x and x*(-1) is -x */
4127	      if (op1is2 && GET_MODE (op0) == mode)
4128		return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
4129
4130	      else if (op1ism1 && GET_MODE (op0) == mode)
4131		return gen_rtx_NEG (mode, op0);
4132	    }
4133	  break;
4134
4135	case IOR:
4136	  if (op1 == const0_rtx)
4137	    return op0;
4138	  if (GET_CODE (op1) == CONST_INT
4139	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4140	    return op1;
4141	  if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4142	    return op0;
4143	  /* A | (~A) -> -1 */
4144	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4145	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4146	      && ! side_effects_p (op0)
4147	      && GET_MODE_CLASS (mode) != MODE_CC)
4148	    return constm1_rtx;
4149	  break;
4150
4151	case XOR:
4152	  if (op1 == const0_rtx)
4153	    return op0;
4154	  if (GET_CODE (op1) == CONST_INT
4155	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4156	    return gen_rtx_NOT (mode, op0);
4157	  if (op0 == op1 && ! side_effects_p (op0)
4158	      && GET_MODE_CLASS (mode) != MODE_CC)
4159	    return const0_rtx;
4160	  break;
4161
4162	case AND:
4163	  if (op1 == const0_rtx && ! side_effects_p (op0))
4164	    return const0_rtx;
4165	  if (GET_CODE (op1) == CONST_INT
4166	      && (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
4167	    return op0;
4168	  if (op0 == op1 && ! side_effects_p (op0)
4169	      && GET_MODE_CLASS (mode) != MODE_CC)
4170	    return op0;
4171	  /* A & (~A) -> 0 */
4172	  if (((GET_CODE (op0) == NOT && rtx_equal_p (XEXP (op0, 0), op1))
4173	       || (GET_CODE (op1) == NOT && rtx_equal_p (XEXP (op1, 0), op0)))
4174	      && ! side_effects_p (op0)
4175	      && GET_MODE_CLASS (mode) != MODE_CC)
4176	    return const0_rtx;
4177	  break;
4178
4179	case UDIV:
4180	  /* Convert divide by power of two into shift (divide by 1 handled
4181	     below).  */
4182	  if (GET_CODE (op1) == CONST_INT
4183	      && (arg1 = exact_log2 (INTVAL (op1))) > 0)
4184	    return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
4185
4186	  /* ... fall through ...  */
4187
4188	case DIV:
4189	  if (op1 == CONST1_RTX (mode))
4190	    return op0;
4191
4192	  /* In IEEE floating point, 0/x is not always 0.  */
4193	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4194	       || ! FLOAT_MODE_P (mode) || flag_fast_math)
4195	      && op0 == CONST0_RTX (mode)
4196	      && ! side_effects_p (op1))
4197	    return op0;
4198
4199#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4200	  /* Change division by a constant into multiplication.  Only do
4201	     this with -ffast-math until an expert says it is safe in
4202	     general.  */
4203	  else if (GET_CODE (op1) == CONST_DOUBLE
4204		   && GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT
4205		   && op1 != CONST0_RTX (mode)
4206		   && flag_fast_math)
4207	    {
4208	      REAL_VALUE_TYPE d;
4209	      REAL_VALUE_FROM_CONST_DOUBLE (d, op1);
4210
4211	      if (! REAL_VALUES_EQUAL (d, dconst0))
4212		{
4213#if defined (REAL_ARITHMETIC)
4214		  REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
4215		  return gen_rtx_MULT (mode, op0,
4216				       CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
4217#else
4218		  return gen_rtx_MULT (mode, op0,
4219				       CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
4220#endif
4221		}
4222	    }
4223#endif
4224	  break;
4225
4226	case UMOD:
4227	  /* Handle modulus by power of two (mod with 1 handled below).  */
4228	  if (GET_CODE (op1) == CONST_INT
4229	      && exact_log2 (INTVAL (op1)) > 0)
4230	    return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
4231
4232	  /* ... fall through ...  */
4233
4234	case MOD:
4235	  if ((op0 == const0_rtx || op1 == const1_rtx)
4236	      && ! side_effects_p (op0) && ! side_effects_p (op1))
4237	    return const0_rtx;
4238	  break;
4239
4240	case ROTATERT:
4241	case ROTATE:
4242	  /* Rotating ~0 always results in ~0.  */
4243	  if (GET_CODE (op0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
4244	      && INTVAL (op0) == GET_MODE_MASK (mode)
4245	      && ! side_effects_p (op1))
4246	    return op0;
4247
4248	  /* ... fall through ...  */
4249
4250	case ASHIFT:
4251	case ASHIFTRT:
4252	case LSHIFTRT:
4253	  if (op1 == const0_rtx)
4254	    return op0;
4255	  if (op0 == const0_rtx && ! side_effects_p (op1))
4256	    return op0;
4257	  break;
4258
4259	case SMIN:
4260	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
	      && INTVAL (op1) == (HOST_WIDE_INT) 1 << (width - 1)
4262	      && ! side_effects_p (op0))
4263	    return op1;
4264	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4265	    return op0;
4266	  break;
4267
4268	case SMAX:
4269	  if (width <= HOST_BITS_PER_WIDE_INT && GET_CODE (op1) == CONST_INT
4270	      && (INTVAL (op1)
4271		  == (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
4272	      && ! side_effects_p (op0))
4273	    return op1;
4274	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4275	    return op0;
4276	  break;
4277
4278	case UMIN:
4279	  if (op1 == const0_rtx && ! side_effects_p (op0))
4280	    return op1;
4281	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4282	    return op0;
4283	  break;
4284
4285	case UMAX:
4286	  if (op1 == constm1_rtx && ! side_effects_p (op0))
4287	    return op1;
4288	  else if (rtx_equal_p (op0, op1) && ! side_effects_p (op0))
4289	    return op0;
4290	  break;
4291
4292	default:
4293	  abort ();
4294	}
4295
4296      return 0;
4297    }
4298
4299  /* Get the integer argument values in two forms:
4300     zero-extended in ARG0, ARG1 and sign-extended in ARG0S, ARG1S.  */
4301
4302  arg0 = INTVAL (op0);
4303  arg1 = INTVAL (op1);
4304
4305  if (width < HOST_BITS_PER_WIDE_INT)
4306    {
4307      arg0 &= ((HOST_WIDE_INT) 1 << width) - 1;
4308      arg1 &= ((HOST_WIDE_INT) 1 << width) - 1;
4309
4310      arg0s = arg0;
4311      if (arg0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4312	arg0s |= ((HOST_WIDE_INT) (-1) << width);
4313
4314      arg1s = arg1;
4315      if (arg1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4316	arg1s |= ((HOST_WIDE_INT) (-1) << width);
4317    }
4318  else
4319    {
4320      arg0s = arg0;
4321      arg1s = arg1;
4322    }
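
  /* For example, with width == 8 and op0 == (const_int -1), arg0
     becomes 0xff (zero-extended) while arg0s becomes -1
     (sign-extended).  UDIV, UMOD, the bitwise operations and the
     logical shift use the former; DIV, MOD and ASHIFTRT use the
     latter.  */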
4323
4324  /* Compute the value of the arithmetic.  */
4325
4326  switch (code)
4327    {
4328    case PLUS:
4329      val = arg0s + arg1s;
4330      break;
4331
4332    case MINUS:
4333      val = arg0s - arg1s;
4334      break;
4335
4336    case MULT:
4337      val = arg0s * arg1s;
4338      break;
4339
4340    case DIV:
4341      if (arg1s == 0)
4342	return 0;
4343      val = arg0s / arg1s;
4344      break;
4345
4346    case MOD:
4347      if (arg1s == 0)
4348	return 0;
4349      val = arg0s % arg1s;
4350      break;
4351
4352    case UDIV:
4353      if (arg1 == 0)
4354	return 0;
4355      val = (unsigned HOST_WIDE_INT) arg0 / arg1;
4356      break;
4357
4358    case UMOD:
4359      if (arg1 == 0)
4360	return 0;
4361      val = (unsigned HOST_WIDE_INT) arg0 % arg1;
4362      break;
4363
4364    case AND:
4365      val = arg0 & arg1;
4366      break;
4367
4368    case IOR:
4369      val = arg0 | arg1;
4370      break;
4371
4372    case XOR:
4373      val = arg0 ^ arg1;
4374      break;
4375
4376    case LSHIFTRT:
4377      /* If shift count is undefined, don't fold it; let the machine do
4378	 what it wants.  But truncate it if the machine will do that.  */
4379      if (arg1 < 0)
4380	return 0;
4381
4382#ifdef SHIFT_COUNT_TRUNCATED
4383      if (SHIFT_COUNT_TRUNCATED)
4384	arg1 %= width;
4385#endif
4386
4387      val = ((unsigned HOST_WIDE_INT) arg0) >> arg1;
4388      break;
4389
4390    case ASHIFT:
4391      if (arg1 < 0)
4392	return 0;
4393
4394#ifdef SHIFT_COUNT_TRUNCATED
4395      if (SHIFT_COUNT_TRUNCATED)
4396	arg1 %= width;
4397#endif
4398
4399      val = ((unsigned HOST_WIDE_INT) arg0) << arg1;
4400      break;
4401
4402    case ASHIFTRT:
4403      if (arg1 < 0)
4404	return 0;
4405
4406#ifdef SHIFT_COUNT_TRUNCATED
4407      if (SHIFT_COUNT_TRUNCATED)
4408	arg1 %= width;
4409#endif
4410
4411      val = arg0s >> arg1;
4412
      /* The bootstrap compiler may not have sign-extended the right
	 shift.  Manually extend the sign to ensure bootstrap cc
	 matches gcc.  */
4415      if (arg0s < 0 && arg1 > 0)
4416	val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1);
4417
4418      break;
4419
4420    case ROTATERT:
4421      if (arg1 < 0)
4422	return 0;
4423
4424      arg1 %= width;
4425      val = ((((unsigned HOST_WIDE_INT) arg0) << (width - arg1))
4426	     | (((unsigned HOST_WIDE_INT) arg0) >> arg1));
4427      break;
4428
4429    case ROTATE:
4430      if (arg1 < 0)
4431	return 0;
4432
4433      arg1 %= width;
4434      val = ((((unsigned HOST_WIDE_INT) arg0) << arg1)
4435	     | (((unsigned HOST_WIDE_INT) arg0) >> (width - arg1)));
4436      break;
4437
4438    case COMPARE:
4439      /* Do nothing here.  */
4440      return 0;
4441
4442    case SMIN:
4443      val = arg0s <= arg1s ? arg0s : arg1s;
4444      break;
4445
4446    case UMIN:
4447      val = ((unsigned HOST_WIDE_INT) arg0
4448	     <= (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4449      break;
4450
4451    case SMAX:
4452      val = arg0s > arg1s ? arg0s : arg1s;
4453      break;
4454
4455    case UMAX:
4456      val = ((unsigned HOST_WIDE_INT) arg0
4457	     > (unsigned HOST_WIDE_INT) arg1 ? arg0 : arg1);
4458      break;
4459
4460    default:
4461      abort ();
4462    }
4463
4464  /* Clear the bits that don't belong in our mode, unless they and our sign
4465     bit are all one.  So we get either a reasonable negative value or a
4466     reasonable unsigned value for this mode.  */
4467  if (width < HOST_BITS_PER_WIDE_INT
4468      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
4469	  != ((HOST_WIDE_INT) (-1) << (width - 1))))
4470    val &= ((HOST_WIDE_INT) 1 << width) - 1;
4471
4472  /* If this would be an entire word for the target, but is not for
4473     the host, then sign-extend on the host so that the number will look
4474     the same way on the host that it would on the target.
4475
4476     For example, when building a 64 bit alpha hosted 32 bit sparc
4477     targeted compiler, then we want the 32 bit unsigned value -1 to be
4478     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend.  */
4480
4481  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
4482      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
4483    val |= ((HOST_WIDE_INT) (-1) << width);
4484
4485  return GEN_INT (val);
4486}
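
/* An illustrative sketch, not part of the pass; the function name is
   invented and the block is kept under #if 0.  Folding a constant
   addition through simplify_binary_operation:  */
#if 0
static rtx
example_fold_plus ()
{
  /* Yields (const_int 5).  With a non-constant operand the call may
     return 0, in which case the caller keeps the original rtx.  */
  return simplify_binary_operation (PLUS, SImode, GEN_INT (2), GEN_INT (3));
}
#endif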
4487
4488/* Simplify a PLUS or MINUS, at least one of whose operands may be another
4489   PLUS or MINUS.
4490
   Rather than test for specific cases, we do this by a brute-force method
4492   and do all possible simplifications until no more changes occur.  Then
4493   we rebuild the operation.  */
4494
4495static rtx
4496simplify_plus_minus (code, mode, op0, op1)
4497     enum rtx_code code;
4498     enum machine_mode mode;
4499     rtx op0, op1;
4500{
4501  rtx ops[8];
4502  int negs[8];
4503  rtx result, tem;
4504  int n_ops = 2, input_ops = 2, input_consts = 0, n_consts = 0;
4505  int first = 1, negate = 0, changed;
4506  int i, j;
4507
4508  bzero ((char *) ops, sizeof ops);
4509
4510  /* Set up the two operands and then expand them until nothing has been
4511     changed.  If we run out of room in our array, give up; this should
4512     almost never happen.  */
4513
4514  ops[0] = op0, ops[1] = op1, negs[0] = 0, negs[1] = (code == MINUS);
4515
4516  changed = 1;
4517  while (changed)
4518    {
4519      changed = 0;
4520
4521      for (i = 0; i < n_ops; i++)
4522	switch (GET_CODE (ops[i]))
4523	  {
4524	  case PLUS:
4525	  case MINUS:
4526	    if (n_ops == 7)
4527	      return 0;
4528
4529	    ops[n_ops] = XEXP (ops[i], 1);
4530	    negs[n_ops++] = GET_CODE (ops[i]) == MINUS ? !negs[i] : negs[i];
4531	    ops[i] = XEXP (ops[i], 0);
4532	    input_ops++;
4533	    changed = 1;
4534	    break;
4535
4536	  case NEG:
4537	    ops[i] = XEXP (ops[i], 0);
4538	    negs[i] = ! negs[i];
4539	    changed = 1;
4540	    break;
4541
4542	  case CONST:
4543	    ops[i] = XEXP (ops[i], 0);
4544	    input_consts++;
4545	    changed = 1;
4546	    break;
4547
4548	  case NOT:
4549	    /* ~a -> (-a - 1) */
4550	    if (n_ops != 7)
4551	      {
4552		ops[n_ops] = constm1_rtx;
4553		negs[n_ops++] = negs[i];
4554		ops[i] = XEXP (ops[i], 0);
4555		negs[i] = ! negs[i];
4556		changed = 1;
4557	      }
4558	    break;
4559
4560	  case CONST_INT:
4561	    if (negs[i])
4562	      ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0, changed = 1;
4563	    break;
4564
4565	  default:
4566	    break;
4567	  }
4568    }
4569
4570  /* If we only have two operands, we can't do anything.  */
4571  if (n_ops <= 2)
4572    return 0;
4573
4574  /* Now simplify each pair of operands until nothing changes.  The first
4575     time through just simplify constants against each other.  */
4576
4577  changed = 1;
4578  while (changed)
4579    {
4580      changed = first;
4581
4582      for (i = 0; i < n_ops - 1; i++)
4583	for (j = i + 1; j < n_ops; j++)
4584	  if (ops[i] != 0 && ops[j] != 0
4585	      && (! first || (CONSTANT_P (ops[i]) && CONSTANT_P (ops[j]))))
4586	    {
4587	      rtx lhs = ops[i], rhs = ops[j];
4588	      enum rtx_code ncode = PLUS;
4589
4590	      if (negs[i] && ! negs[j])
4591		lhs = ops[j], rhs = ops[i], ncode = MINUS;
4592	      else if (! negs[i] && negs[j])
4593		ncode = MINUS;
4594
4595	      tem = simplify_binary_operation (ncode, mode, lhs, rhs);
4596	      if (tem)
4597		{
4598		  ops[i] = tem, ops[j] = 0;
4599		  negs[i] = negs[i] && negs[j];
4600		  if (GET_CODE (tem) == NEG)
4601		    ops[i] = XEXP (tem, 0), negs[i] = ! negs[i];
4602
4603		  if (GET_CODE (ops[i]) == CONST_INT && negs[i])
4604		    ops[i] = GEN_INT (- INTVAL (ops[i])), negs[i] = 0;
4605		  changed = 1;
4606		}
4607	    }
4608
4609      first = 0;
4610    }
4611
4612  /* Pack all the operands to the lower-numbered entries and give up if
4613     we didn't reduce the number of operands we had.  Make sure we
4614     count a CONST as two operands.  If we have the same number of
4615     operands, but have made more CONSTs than we had, this is also
4616     an improvement, so accept it.  */
4617
4618  for (i = 0, j = 0; j < n_ops; j++)
4619    if (ops[j] != 0)
4620      {
4621	ops[i] = ops[j], negs[i++] = negs[j];
4622	if (GET_CODE (ops[j]) == CONST)
4623	  n_consts++;
4624      }
4625
4626  if (i + n_consts > input_ops
4627      || (i + n_consts == input_ops && n_consts <= input_consts))
4628    return 0;
4629
4630  n_ops = i;
4631
4632  /* If we have a CONST_INT, put it last.  */
4633  for (i = 0; i < n_ops - 1; i++)
4634    if (GET_CODE (ops[i]) == CONST_INT)
4635      {
4636	tem = ops[n_ops - 1], ops[n_ops - 1] = ops[i] , ops[i] = tem;
4637	j = negs[n_ops - 1], negs[n_ops - 1] = negs[i], negs[i] = j;
4638      }
4639
4640  /* Put a non-negated operand first.  If there aren't any, make all
4641     operands positive and negate the whole thing later.  */
4642  for (i = 0; i < n_ops && negs[i]; i++)
4643    ;
4644
4645  if (i == n_ops)
4646    {
4647      for (i = 0; i < n_ops; i++)
4648	negs[i] = 0;
4649      negate = 1;
4650    }
4651  else if (i != 0)
4652    {
4653      tem = ops[0], ops[0] = ops[i], ops[i] = tem;
4654      j = negs[0], negs[0] = negs[i], negs[i] = j;
4655    }
4656
4657  /* Now make the result by performing the requested operations.  */
4658  result = ops[0];
4659  for (i = 1; i < n_ops; i++)
4660    result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
4661
4662  return negate ? gen_rtx_NEG (mode, result) : result;
4663}
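
/* To illustrate the expansion above: for (a - (b - c)) + b the operand
   array becomes  a, +b, -b, +c;  the pairwise pass folds b - b to 0
   and then a + 0 to a, and the rebuild yields (plus a c).  */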
4664
4665/* Make a binary operation by properly ordering the operands and
4666   seeing if the expression folds.  */
4667
4668static rtx
4669cse_gen_binary (code, mode, op0, op1)
4670     enum rtx_code code;
4671     enum machine_mode mode;
4672     rtx op0, op1;
4673{
4674  rtx tem;
4675
4676  /* Put complex operands first and constants second if commutative.  */
4677  if (GET_RTX_CLASS (code) == 'c'
4678      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
4679	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
4680	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
4681	  || (GET_CODE (op0) == SUBREG
4682	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
4683	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
4684    tem = op0, op0 = op1, op1 = tem;
4685
4686  /* If this simplifies, do it.  */
4687  tem = simplify_binary_operation (code, mode, op0, op1);
4688
4689  if (tem)
4690    return tem;
4691
4692  /* Handle addition and subtraction of CONST_INT specially.  Otherwise,
4693     just form the operation.  */
4694
4695  if (code == PLUS && GET_CODE (op1) == CONST_INT
4696      && GET_MODE (op0) != VOIDmode)
4697    return plus_constant (op0, INTVAL (op1));
4698  else if (code == MINUS && GET_CODE (op1) == CONST_INT
4699	   && GET_MODE (op0) != VOIDmode)
4700    return plus_constant (op0, - INTVAL (op1));
4701  else
4702    return gen_rtx_fmt_ee (code, mode, op0, op1);
4703}
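
/* An illustrative sketch, not part of the pass; the function name is
   invented and the block is kept under #if 0.  It shows the canonical
   ordering done above: for a commutative code with a constant first
   operand, the operands are swapped before folding, so both calls
   below reduce to plus_constant (reg, 4).  */
#if 0
static void
example_canonical_order ()
{
  rtx reg = gen_rtx_REG (SImode, 1);
  rtx a = cse_gen_binary (PLUS, SImode, GEN_INT (4), reg);
  rtx b = cse_gen_binary (PLUS, SImode, reg, GEN_INT (4));

  /* A and B are both (plus:SI (reg:SI 1) (const_int 4)).  */
}
#endif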
4704
4705struct cfc_args
4706{
4707  /* Input */
4708  rtx op0, op1;
4709  /* Output */
4710  int equal, op0lt, op1lt;
4711};
4712
4713static void
4714check_fold_consts (data)
4715  PTR data;
4716{
4717  struct cfc_args * args = (struct cfc_args *) data;
4718  REAL_VALUE_TYPE d0, d1;
4719
4720  REAL_VALUE_FROM_CONST_DOUBLE (d0, args->op0);
4721  REAL_VALUE_FROM_CONST_DOUBLE (d1, args->op1);
4722  args->equal = REAL_VALUES_EQUAL (d0, d1);
4723  args->op0lt = REAL_VALUES_LESS (d0, d1);
4724  args->op1lt = REAL_VALUES_LESS (d1, d0);
4725}
4726
4727/* Like simplify_binary_operation except used for relational operators.
4728   MODE is the mode of the operands, not that of the result.  If MODE
4729   is VOIDmode, both operands must also be VOIDmode and we compare the
4730   operands in "infinite precision".
4731
4732   If no simplification is possible, this function returns zero.  Otherwise,
4733   it returns either const_true_rtx or const0_rtx.  */
4734
4735rtx
4736simplify_relational_operation (code, mode, op0, op1)
4737     enum rtx_code code;
4738     enum machine_mode mode;
4739     rtx op0, op1;
4740{
4741  int equal, op0lt, op0ltu, op1lt, op1ltu;
4742  rtx tem;
4743
4744  /* If op0 is a compare, extract the comparison arguments from it.  */
4745  if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
4746    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4747
4748  /* We can't simplify MODE_CC values since we don't know what the
4749     actual comparison is.  */
4750  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC
4751#ifdef HAVE_cc0
4752      || op0 == cc0_rtx
4753#endif
4754      )
4755    return 0;
4756
4757  /* For integer comparisons of A and B maybe we can simplify A - B and can
4758     then simplify a comparison of that with zero.  If A and B are both either
4759     a register or a CONST_INT, this can't help; testing for these cases will
4760     prevent infinite recursion here and speed things up.
4761
4762     If CODE is an unsigned comparison, then we can never do this optimization,
4763     because it gives an incorrect result if the subtraction wraps around zero.
4764     ANSI C defines unsigned operations such that they never overflow, and
     thus such cases cannot be ignored.  */
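
  /* For instance, with QImode values 0 and 255, the difference
     0 - 255 wraps to 1, and a signed test of that difference against
     zero would wrongly report 0 > 255.  */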
4766
4767  if (INTEGRAL_MODE_P (mode) && op1 != const0_rtx
4768      && ! ((GET_CODE (op0) == REG || GET_CODE (op0) == CONST_INT)
4769	    && (GET_CODE (op1) == REG || GET_CODE (op1) == CONST_INT))
4770      && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
4771      && code != GTU && code != GEU && code != LTU && code != LEU)
4772    return simplify_relational_operation (signed_condition (code),
4773					  mode, tem, const0_rtx);
4774
4775  /* For non-IEEE floating-point, if the two operands are equal, we know the
4776     result.  */
4777  if (rtx_equal_p (op0, op1)
4778      && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4779	  || ! FLOAT_MODE_P (GET_MODE (op0)) || flag_fast_math))
4780    equal = 1, op0lt = 0, op0ltu = 0, op1lt = 0, op1ltu = 0;
4781
4782  /* If the operands are floating-point constants, see if we can fold
4783     the result.  */
4784#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4785  else if (GET_CODE (op0) == CONST_DOUBLE && GET_CODE (op1) == CONST_DOUBLE
4786	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
4787    {
4788      struct cfc_args args;
4789
      /* Set up the input for check_fold_consts.  */
      args.op0 = op0;
      args.op1 = op1;

      if (do_float_handler (check_fold_consts, (PTR) &args) == 0)
	/* We got an exception from check_fold_consts.  */
	return 0;

      /* Receive the output from check_fold_consts.  */
4799      equal = args.equal;
4800      op0lt = op0ltu = args.op0lt;
4801      op1lt = op1ltu = args.op1lt;
4802    }
4803#endif  /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4804
4805  /* Otherwise, see if the operands are both integers.  */
4806  else if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
4807	   && (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
4808	   && (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
4809    {
4810      int width = GET_MODE_BITSIZE (mode);
4811      HOST_WIDE_INT l0s, h0s, l1s, h1s;
4812      unsigned HOST_WIDE_INT l0u, h0u, l1u, h1u;
4813
4814      /* Get the two words comprising each integer constant.  */
4815      if (GET_CODE (op0) == CONST_DOUBLE)
4816	{
4817	  l0u = l0s = CONST_DOUBLE_LOW (op0);
4818	  h0u = h0s = CONST_DOUBLE_HIGH (op0);
4819	}
4820      else
4821	{
4822	  l0u = l0s = INTVAL (op0);
4823	  h0u = h0s = l0s < 0 ? -1 : 0;
4824	}
4825
4826      if (GET_CODE (op1) == CONST_DOUBLE)
4827	{
4828	  l1u = l1s = CONST_DOUBLE_LOW (op1);
4829	  h1u = h1s = CONST_DOUBLE_HIGH (op1);
4830	}
4831      else
4832	{
4833	  l1u = l1s = INTVAL (op1);
4834	  h1u = h1s = l1s < 0 ? -1 : 0;
4835	}
4836
      /* If WIDTH is nonzero and no wider than HOST_BITS_PER_WIDE_INT,
	 we have to sign or zero-extend the values.  */
4839      if (width != 0 && width <= HOST_BITS_PER_WIDE_INT)
4840	h0u = h1u = 0, h0s = l0s < 0 ? -1 : 0, h1s = l1s < 0 ? -1 : 0;
4841
4842      if (width != 0 && width < HOST_BITS_PER_WIDE_INT)
4843	{
4844	  l0u &= ((HOST_WIDE_INT) 1 << width) - 1;
4845	  l1u &= ((HOST_WIDE_INT) 1 << width) - 1;
4846
4847	  if (l0s & ((HOST_WIDE_INT) 1 << (width - 1)))
4848	    l0s |= ((HOST_WIDE_INT) (-1) << width);
4849
4850	  if (l1s & ((HOST_WIDE_INT) 1 << (width - 1)))
4851	    l1s |= ((HOST_WIDE_INT) (-1) << width);
4852	}
4853
4854      equal = (h0u == h1u && l0u == l1u);
4855      op0lt = (h0s < h1s || (h0s == h1s && l0s < l1s));
4856      op1lt = (h1s < h0s || (h1s == h0s && l1s < l0s));
4857      op0ltu = (h0u < h1u || (h0u == h1u && l0u < l1u));
4858      op1ltu = (h1u < h0u || (h1u == h0u && l1u < l0u));
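
      /* The (high, low) pairs compare lexicographically, with the low
	 words always compared unsigned.  For example, h0s == -1,
	 l0s == 5 (a negative value) is less than h1s == 0, l1s == 1,
	 because h0s < h1s already decides it.  */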
4859    }
4860
4861  /* Otherwise, there are some code-specific tests we can make.  */
4862  else
4863    {
4864      switch (code)
4865	{
4866	case EQ:
4867	  /* References to the frame plus a constant or labels cannot
4868	     be zero, but a SYMBOL_REF can due to #pragma weak.  */
4869	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4870	       || GET_CODE (op0) == LABEL_REF)
4871#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4872	      /* On some machines, the ap reg can be 0 sometimes.  */
4873	      && op0 != arg_pointer_rtx
4874#endif
	      )
4876	    return const0_rtx;
4877	  break;
4878
4879	case NE:
4880	  if (((NONZERO_BASE_PLUS_P (op0) && op1 == const0_rtx)
4881	       || GET_CODE (op0) == LABEL_REF)
4882#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4883	      && op0 != arg_pointer_rtx
4884#endif
4885	      )
4886	    return const_true_rtx;
4887	  break;
4888
4889	case GEU:
4890	  /* Unsigned values are never negative.  */
4891	  if (op1 == const0_rtx)
4892	    return const_true_rtx;
4893	  break;
4894
4895	case LTU:
4896	  if (op1 == const0_rtx)
4897	    return const0_rtx;
4898	  break;
4899
	case LEU:
	  /* Unsigned values are never greater than the largest
	     unsigned value.  */
	  if (GET_CODE (op1) == CONST_INT
	      && INTVAL (op1) == GET_MODE_MASK (mode)
	      && INTEGRAL_MODE_P (mode))
	    return const_true_rtx;
	  break;
4908
4909	case GTU:
4910	  if (GET_CODE (op1) == CONST_INT
4911	      && INTVAL (op1) == GET_MODE_MASK (mode)
4912	      && INTEGRAL_MODE_P (mode))
4913	    return const0_rtx;
4914	  break;
4915
4916	default:
4917	  break;
4918	}
4919
4920      return 0;
4921    }
4922
4923  /* If we reach here, EQUAL, OP0LT, OP0LTU, OP1LT, and OP1LTU are set
4924     as appropriate.  */
4925  switch (code)
4926    {
4927    case EQ:
4928      return equal ? const_true_rtx : const0_rtx;
4929    case NE:
4930      return ! equal ? const_true_rtx : const0_rtx;
4931    case LT:
4932      return op0lt ? const_true_rtx : const0_rtx;
4933    case GT:
4934      return op1lt ? const_true_rtx : const0_rtx;
4935    case LTU:
4936      return op0ltu ? const_true_rtx : const0_rtx;
4937    case GTU:
4938      return op1ltu ? const_true_rtx : const0_rtx;
4939    case LE:
4940      return equal || op0lt ? const_true_rtx : const0_rtx;
4941    case GE:
4942      return equal || op1lt ? const_true_rtx : const0_rtx;
4943    case LEU:
4944      return equal || op0ltu ? const_true_rtx : const0_rtx;
4945    case GEU:
4946      return equal || op1ltu ? const_true_rtx : const0_rtx;
4947    default:
4948      abort ();
4949    }
4950}
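
/* An illustrative sketch, not part of the pass; the function name is
   invented and the block is kept under #if 0.  Comparing two
   CONST_INTs in "infinite precision" (VOIDmode) folds outright:  */
#if 0
static rtx
example_fold_lt ()
{
  /* -1 < 0, so this returns const_true_rtx.  */
  return simplify_relational_operation (LT, VOIDmode,
					constm1_rtx, const0_rtx);
}
#endif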
4951
4952/* Simplify CODE, an operation with result mode MODE and three operands,
4953   OP0, OP1, and OP2.  OP0_MODE was the mode of OP0 before it became
   a constant.  Return 0 if no simplification is possible.  */
4955
4956rtx
4957simplify_ternary_operation (code, mode, op0_mode, op0, op1, op2)
4958     enum rtx_code code;
4959     enum machine_mode mode, op0_mode;
4960     rtx op0, op1, op2;
4961{
4962  int width = GET_MODE_BITSIZE (mode);
4963
4964  /* VOIDmode means "infinite" precision.  */
4965  if (width == 0)
4966    width = HOST_BITS_PER_WIDE_INT;
4967
4968  switch (code)
4969    {
4970    case SIGN_EXTRACT:
4971    case ZERO_EXTRACT:
4972      if (GET_CODE (op0) == CONST_INT
4973	  && GET_CODE (op1) == CONST_INT
4974	  && GET_CODE (op2) == CONST_INT
4975	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_BITSIZE (op0_mode)
4976	  && width <= HOST_BITS_PER_WIDE_INT)
4977	{
4978	  /* Extracting a bit-field from a constant */
4979	  HOST_WIDE_INT val = INTVAL (op0);
4980
4981	  if (BITS_BIG_ENDIAN)
4982	    val >>= (GET_MODE_BITSIZE (op0_mode)
4983		     - INTVAL (op2) - INTVAL (op1));
4984	  else
4985	    val >>= INTVAL (op2);
4986
4987	  if (HOST_BITS_PER_WIDE_INT != INTVAL (op1))
4988	    {
4989	      /* First zero-extend.  */
4990	      val &= ((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1;
4991	      /* If desired, propagate sign bit.  */
4992	      if (code == SIGN_EXTRACT
4993		  && (val & ((HOST_WIDE_INT) 1 << (INTVAL (op1) - 1))))
4994		val |= ~ (((HOST_WIDE_INT) 1 << INTVAL (op1)) - 1);
4995	    }
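	  /* For example, a ZERO_EXTRACT of 8 bits at position 4
	     (little-endian numbering) from 0xabcd shifts right by 4 to
	     0xabc and masks down to 0xbc; a SIGN_EXTRACT of the same
	     field sees bit 7 set and widens 0xbc to -68.  */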
4996
4997	  /* Clear the bits that don't belong in our mode,
4998	     unless they and our sign bit are all one.
4999	     So we get either a reasonable negative value or a reasonable
5000	     unsigned value for this mode.  */
5001	  if (width < HOST_BITS_PER_WIDE_INT
5002	      && ((val & ((HOST_WIDE_INT) (-1) << (width - 1)))
5003		  != ((HOST_WIDE_INT) (-1) << (width - 1))))
5004	    val &= ((HOST_WIDE_INT) 1 << width) - 1;
5005
5006	  return GEN_INT (val);
5007	}
5008      break;
5009
5010    case IF_THEN_ELSE:
5011      if (GET_CODE (op0) == CONST_INT)
5012	return op0 != const0_rtx ? op1 : op2;
5013
      /* Convert a != b ? a : b and a == b ? b : a to "a".  */
5015      if (GET_CODE (op0) == NE && ! side_effects_p (op0)
5016	  && rtx_equal_p (XEXP (op0, 0), op1)
5017	  && rtx_equal_p (XEXP (op0, 1), op2))
5018	return op1;
5019      else if (GET_CODE (op0) == EQ && ! side_effects_p (op0)
5020	  && rtx_equal_p (XEXP (op0, 1), op1)
5021	  && rtx_equal_p (XEXP (op0, 0), op2))
5022	return op2;
5023      else if (GET_RTX_CLASS (GET_CODE (op0)) == '<' && ! side_effects_p (op0))
5024	{
5025	  rtx temp;
5026	  temp = simplify_relational_operation (GET_CODE (op0), op0_mode,
5027						XEXP (op0, 0), XEXP (op0, 1));
5028	  /* See if any simplifications were possible.  */
5029	  if (temp == const0_rtx)
5030	    return op2;
5031	  else if (temp == const1_rtx)
5032	    return op1;
5033	}
5034      break;
5035
5036    default:
5037      abort ();
5038    }
5039
5040  return 0;
5041}
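
/* An illustrative sketch, not part of the pass; the function name is
   invented and the block is kept under #if 0.  A constant
   IF_THEN_ELSE condition selects one arm outright:  */
#if 0
static rtx
example_fold_cond (op1, op2)
     rtx op1, op2;
{
  /* The condition (const_int 1) is nonzero, so OP1 is returned.  */
  return simplify_ternary_operation (IF_THEN_ELSE, SImode, SImode,
				     const1_rtx, op1, op2);
}
#endif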
5042
5043/* If X is a nontrivial arithmetic operation on an argument
5044   for which a constant value can be determined, return
5045   the result of operating on that value, as a constant.
5046   Otherwise, return X, possibly with one or more operands
5047   modified by recursive calls to this function.
5048
5049   If X is a register whose contents are known, we do NOT
5050   return those contents here.  equiv_constant is called to
5051   perform that task.
5052
5053   INSN is the insn that we may be modifying.  If it is 0, make a copy
5054   of X before modifying it.  */
5055
5056static rtx
5057fold_rtx (x, insn)
5058     rtx x;
5059     rtx insn;
5060{
5061  register enum rtx_code code;
5062  register enum machine_mode mode;
5063  register char *fmt;
5064  register int i;
5065  rtx new = 0;
5066  int copied = 0;
5067  int must_swap = 0;
5068
5069  /* Folded equivalents of first two operands of X.  */
5070  rtx folded_arg0;
5071  rtx folded_arg1;
5072
5073  /* Constant equivalents of first three operands of X;
5074     0 when no such equivalent is known.  */
5075  rtx const_arg0;
5076  rtx const_arg1;
5077  rtx const_arg2;
5078
5079  /* The mode of the first operand of X.  We need this for sign and zero
5080     extends.  */
5081  enum machine_mode mode_arg0;
5082
5083  if (x == 0)
5084    return x;
5085
5086  mode = GET_MODE (x);
5087  code = GET_CODE (x);
5088  switch (code)
5089    {
5090    case CONST:
5091    case CONST_INT:
5092    case CONST_DOUBLE:
5093    case SYMBOL_REF:
5094    case LABEL_REF:
5095    case REG:
      /* No use simplifying an EXPR_LIST, since EXPR_LISTs are used
	 only for lists of args in a function call's REG_EQUAL note.  */
5099    case EXPR_LIST:
5100      /* Changing anything inside an ADDRESSOF is incorrect; we don't
5101	 want to (e.g.,) make (addressof (const_int 0)) just because
5102	 the location is known to be zero.  */
5103    case ADDRESSOF:
5104      return x;
5105
5106#ifdef HAVE_cc0
5107    case CC0:
5108      return prev_insn_cc0;
5109#endif
5110
5111    case PC:
5112      /* If the next insn is a CODE_LABEL followed by a jump table,
5113	 PC's value is a LABEL_REF pointing to that label.  That
5114	 lets us fold switch statements on the Vax.  */
5115      if (insn && GET_CODE (insn) == JUMP_INSN)
5116	{
5117	  rtx next = next_nonnote_insn (insn);
5118
5119	  if (next && GET_CODE (next) == CODE_LABEL
5120	      && NEXT_INSN (next) != 0
5121	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
5122	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
5123		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
5124	    return gen_rtx_LABEL_REF (Pmode, next);
5125	}
5126      break;
5127
5128    case SUBREG:
5129      /* See if we previously assigned a constant value to this SUBREG.  */
5130      if ((new = lookup_as_function (x, CONST_INT)) != 0
5131	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
5132	return new;
5133
5134      /* If this is a paradoxical SUBREG, we have no idea what value the
5135	 extra bits would have.  However, if the operand is equivalent
5136	 to a SUBREG whose operand is the same as our mode, and all the
5137	 modes are within a word, we can just use the inner operand
5138	 because these SUBREGs just say how to treat the register.
5139
5140	 Similarly if we find an integer constant.  */
5141
5142      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5143	{
5144	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
5145	  struct table_elt *elt;
5146
5147	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
5148	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
5149	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
5150				imode)) != 0)
5151	    for (elt = elt->first_same_value;
5152		 elt; elt = elt->next_same_value)
5153	      {
5154		if (CONSTANT_P (elt->exp)
5155		    && GET_MODE (elt->exp) == VOIDmode)
5156		  return elt->exp;
5157
5158		if (GET_CODE (elt->exp) == SUBREG
5159		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
5160		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5161		  return copy_rtx (SUBREG_REG (elt->exp));
5162	      }
5163
5164	  return x;
5165	}
5166
5167      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
5168	 We might be able to if the SUBREG is extracting a single word in an
5169	 integral mode or extracting the low part.  */
5170
5171      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
5172      const_arg0 = equiv_constant (folded_arg0);
5173      if (const_arg0)
5174	folded_arg0 = const_arg0;
5175
5176      if (folded_arg0 != SUBREG_REG (x))
5177	{
5178	  new = 0;
5179
5180	  if (GET_MODE_CLASS (mode) == MODE_INT
5181	      && GET_MODE_SIZE (mode) == UNITS_PER_WORD
5182	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
5183	    new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
5184				   GET_MODE (SUBREG_REG (x)));
5185	  if (new == 0 && subreg_lowpart_p (x))
5186	    new = gen_lowpart_if_possible (mode, folded_arg0);
5187	  if (new)
5188	    return new;
5189	}
5190
5191      /* If this is a narrowing SUBREG and our operand is a REG, see if
5192	 we can find an equivalence for REG that is an arithmetic operation
5193	 in a wider mode where both operands are paradoxical SUBREGs
5194	 from objects of our result mode.  In that case, we couldn't report
5195	 an equivalent value for that operation, since we don't know what the
5196	 extra bits will be.  But we can find an equivalence for this SUBREG
5197	 by folding that operation in the narrow mode.  This allows us to
5198	 fold arithmetic in narrow modes when the machine only supports
5199	 word-sized arithmetic.
5200
5201	 Also look for a case where we have a SUBREG whose operand is the
5202	 same as our result.  If both modes are smaller than a word, we
5203	 are simply interpreting a register in different modes and we
5204	 can use the inner value.  */
5205
5206      if (GET_CODE (folded_arg0) == REG
5207	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
5208	  && subreg_lowpart_p (x))
5209	{
5210	  struct table_elt *elt;
5211
5212	  /* We can use HASH here since we know that canon_hash won't be
5213	     called.  */
5214	  elt = lookup (folded_arg0,
5215			HASH (folded_arg0, GET_MODE (folded_arg0)),
5216			GET_MODE (folded_arg0));
5217
5218	  if (elt)
5219	    elt = elt->first_same_value;
5220
5221	  for (; elt; elt = elt->next_same_value)
5222	    {
5223	      enum rtx_code eltcode = GET_CODE (elt->exp);
5224
5225	      /* Just check for unary and binary operations.  */
5226	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
5227		  && GET_CODE (elt->exp) != SIGN_EXTEND
5228		  && GET_CODE (elt->exp) != ZERO_EXTEND
5229		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5230		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
5231		{
5232		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
5233
5234		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5235		    op0 = fold_rtx (op0, NULL_RTX);
5236
5237		  op0 = equiv_constant (op0);
5238		  if (op0)
5239		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
5240						    op0, mode);
5241		}
5242	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
5243			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
5244		       && eltcode != DIV && eltcode != MOD
5245		       && eltcode != UDIV && eltcode != UMOD
5246		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
5247		       && eltcode != ROTATE && eltcode != ROTATERT
5248		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
5249			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
5250				== mode))
5251			   || CONSTANT_P (XEXP (elt->exp, 0)))
5252		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
5253			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
5254				== mode))
5255			   || CONSTANT_P (XEXP (elt->exp, 1))))
5256		{
5257		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
5258		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
5259
5260		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
5261		    op0 = fold_rtx (op0, NULL_RTX);
5262
5263		  if (op0)
5264		    op0 = equiv_constant (op0);
5265
5266		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
5267		    op1 = fold_rtx (op1, NULL_RTX);
5268
5269		  if (op1)
5270		    op1 = equiv_constant (op1);
5271
5272		  /* If we are looking for the low SImode part of
5273		     (ashift:DI c (const_int 32)), it doesn't work
5274		     to compute that in SImode, because a 32-bit shift
5275		     in SImode is unpredictable.  We know the value is 0.  */
5276		  if (op0 && op1
5277		      && GET_CODE (elt->exp) == ASHIFT
5278		      && GET_CODE (op1) == CONST_INT
5279		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
5280		    {
5281		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
5282
5283			/* If the count fits in the inner mode's width,
5284			   but exceeds the outer mode's width,
5285			   the value will get truncated to 0
5286			   by the subreg.  */
5287			new = const0_rtx;
5288		      else
5289			/* If the count exceeds even the inner mode's width,
5290			   don't fold this expression.  */
5291			new = 0;
5292		    }
5293		  else if (op0 && op1)
5294		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
5295						     op0, op1);
5296		}
5297
5298	      else if (GET_CODE (elt->exp) == SUBREG
5299		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
5300		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
5301			   <= UNITS_PER_WORD)
5302		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
5303		new = copy_rtx (SUBREG_REG (elt->exp));
5304
5305	      if (new)
5306		return new;
5307	    }
5308	}
5309
5310      return x;
5311
5312    case NOT:
5313    case NEG:
5314      /* If we have (NOT Y), see if Y is known to be (NOT Z).
5315	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
5316      new = lookup_as_function (XEXP (x, 0), code);
5317      if (new)
5318	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
5319      break;
5320
5321    case MEM:
5322      /* If we are not actually processing an insn, don't try to find the
5323	 best address.  Not only don't we care, but we could modify the
5324	 MEM in an invalid way since we have no insn to validate against.  */
5325      if (insn != 0)
5326	find_best_addr (insn, &XEXP (x, 0));
5327
5328      {
5329	/* Even if we don't fold in the insn itself,
5330	   we can safely do so here, in hopes of getting a constant.  */
5331	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
5332	rtx base = 0;
5333	HOST_WIDE_INT offset = 0;
5334
5335	if (GET_CODE (addr) == REG
5336	    && REGNO_QTY_VALID_P (REGNO (addr))
5337	    && GET_MODE (addr) == qty_mode[REG_QTY (REGNO (addr))]
5338	    && qty_const[REG_QTY (REGNO (addr))] != 0)
5339	  addr = qty_const[REG_QTY (REGNO (addr))];
5340
5341	/* If address is constant, split it into a base and integer offset.  */
5342	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
5343	  base = addr;
5344	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
5345		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
5346	  {
5347	    base = XEXP (XEXP (addr, 0), 0);
5348	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
5349	  }
5350	else if (GET_CODE (addr) == LO_SUM
5351		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
5352	  base = XEXP (addr, 1);
5353	else if (GET_CODE (addr) == ADDRESSOF)
5354	  return change_address (x, VOIDmode, addr);
5355
5356	/* If this is a constant pool reference, we can fold it into its
5357	   constant to allow better value tracking.  */
5358	if (base && GET_CODE (base) == SYMBOL_REF
5359	    && CONSTANT_POOL_ADDRESS_P (base))
5360	  {
5361	    rtx constant = get_pool_constant (base);
5362	    enum machine_mode const_mode = get_pool_mode (base);
5363	    rtx new;
5364
5365	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
5366	      constant_pool_entries_cost = COST (constant);
5367
5368	    /* If we are loading the full constant, we have an equivalence.  */
5369	    if (offset == 0 && mode == const_mode)
5370	      return constant;
5371
5372	    /* If this actually isn't a constant (weird!), we can't do
5373	       anything.  Otherwise, handle the two most common cases:
5374	       extracting a word from a multi-word constant, and extracting
5375	       the low-order bits.  Other cases don't seem common enough to
5376	       worry about.  */
5377	    if (! CONSTANT_P (constant))
5378	      return x;
5379
5380	    if (GET_MODE_CLASS (mode) == MODE_INT
5381		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
5382		&& offset % UNITS_PER_WORD == 0
5383		&& (new = operand_subword (constant,
5384					   offset / UNITS_PER_WORD,
5385					   0, const_mode)) != 0)
5386	      return new;
5387
5388	    if (((BYTES_BIG_ENDIAN
5389		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
5390		 || (! BYTES_BIG_ENDIAN && offset == 0))
5391		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
5392	      return new;
5393	  }
5394
5395	/* If this is a reference to a label at a known position in a jump
5396	   table, we also know its value.  */
5397	if (base && GET_CODE (base) == LABEL_REF)
5398	  {
5399	    rtx label = XEXP (base, 0);
5400	    rtx table_insn = NEXT_INSN (label);
5401
5402	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5403		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
5404	      {
5405		rtx table = PATTERN (table_insn);
5406
5407		if (offset >= 0
5408		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5409			< XVECLEN (table, 0)))
5410		  return XVECEXP (table, 0,
5411				  offset / GET_MODE_SIZE (GET_MODE (table)));
5412	      }
5413	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
5414		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
5415	      {
5416		rtx table = PATTERN (table_insn);
5417
5418		if (offset >= 0
5419		    && (offset / GET_MODE_SIZE (GET_MODE (table))
5420			< XVECLEN (table, 1)))
5421		  {
5422		    offset /= GET_MODE_SIZE (GET_MODE (table));
5423		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
5424					 XEXP (table, 0));
5425
5426		    if (GET_MODE (table) != Pmode)
5427		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
5428
5429		    /* Indicate this is a constant.  This isn't a
5430		       valid form of CONST, but it will only be used
5431		       to fold the next insns and then discarded, so
5432		       it should be safe.
5433
5434	       Note this expression must be explicitly discarded by
5435	       cse_insn, else it may end up in a REG_EQUAL note
5436		       and "escape" to cause problems elsewhere.  */
5437		    return gen_rtx_CONST (GET_MODE (new), new);
5438		  }
5439	      }
5440	  }
5441
5442	return x;
5443      }
5444
5445    case ASM_OPERANDS:
5446      for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
5447	validate_change (insn, &XVECEXP (x, 3, i),
5448			 fold_rtx (XVECEXP (x, 3, i), insn), 0);
5449      break;
5450
5451    default:
5452      break;
5453    }
5454
5455  const_arg0 = 0;
5456  const_arg1 = 0;
5457  const_arg2 = 0;
5458  mode_arg0 = VOIDmode;
5459
5460  /* Try folding our operands.
5461     Then see which ones have constant values known.  */
5462
5463  fmt = GET_RTX_FORMAT (code);
5464  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5465    if (fmt[i] == 'e')
5466      {
5467	rtx arg = XEXP (x, i);
5468	rtx folded_arg = arg, const_arg = 0;
5469	enum machine_mode mode_arg = GET_MODE (arg);
5470	rtx cheap_arg, expensive_arg;
5471	rtx replacements[2];
5472	int j;
5473
5474	/* Most arguments are cheap, so handle them specially.  */
5475	switch (GET_CODE (arg))
5476	  {
5477	  case REG:
5478	    /* This is the same as calling equiv_constant; it is duplicated
5479	       here for speed.  */
5480	    if (REGNO_QTY_VALID_P (REGNO (arg))
5481		&& qty_const[REG_QTY (REGNO (arg))] != 0
5482		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != REG
5483		&& GET_CODE (qty_const[REG_QTY (REGNO (arg))]) != PLUS)
5484	      const_arg
5485		= gen_lowpart_if_possible (GET_MODE (arg),
5486					   qty_const[REG_QTY (REGNO (arg))]);
5487	    break;
5488
5489	  case CONST:
5490	  case CONST_INT:
5491	  case SYMBOL_REF:
5492	  case LABEL_REF:
5493	  case CONST_DOUBLE:
5494	    const_arg = arg;
5495	    break;
5496
5497#ifdef HAVE_cc0
5498	  case CC0:
5499	    folded_arg = prev_insn_cc0;
5500	    mode_arg = prev_insn_cc0_mode;
5501	    const_arg = equiv_constant (folded_arg);
5502	    break;
5503#endif
5504
5505	  default:
5506	    folded_arg = fold_rtx (arg, insn);
5507	    const_arg = equiv_constant (folded_arg);
5508	  }
5509
5510	/* For the first three operands, see if the operand
5511	   is constant or equivalent to a constant.  */
5512	switch (i)
5513	  {
5514	  case 0:
5515	    folded_arg0 = folded_arg;
5516	    const_arg0 = const_arg;
5517	    mode_arg0 = mode_arg;
5518	    break;
5519	  case 1:
5520	    folded_arg1 = folded_arg;
5521	    const_arg1 = const_arg;
5522	    break;
5523	  case 2:
5524	    const_arg2 = const_arg;
5525	    break;
5526	  }
5527
5528	/* Pick the least expensive of the folded argument and an
5529	   equivalent constant argument.  */
5530	if (const_arg == 0 || const_arg == folded_arg
5531	    || COST (const_arg) > COST (folded_arg))
5532	  cheap_arg = folded_arg, expensive_arg = const_arg;
5533	else
5534	  cheap_arg = const_arg, expensive_arg = folded_arg;
5535
5536	/* Try to replace the operand with the cheapest of the two
5537	   possibilities.  If it doesn't work and this is either of the first
5538	   two operands of a commutative operation, try swapping them.
5539	   If THAT fails, try the more expensive, provided it is cheaper
5540	   than what is already there.  */
5541
5542	if (cheap_arg == XEXP (x, i))
5543	  continue;
5544
5545	if (insn == 0 && ! copied)
5546	  {
5547	    x = copy_rtx (x);
5548	    copied = 1;
5549	  }
5550
5551	replacements[0] = cheap_arg, replacements[1] = expensive_arg;
5552	for (j = 0;
5553	     j < 2 && replacements[j]
5554	     && COST (replacements[j]) < COST (XEXP (x, i));
5555	     j++)
5556	  {
5557	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
5558	      break;
5559
5560	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
5561	      {
5562		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
5563		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
5564
5565		if (apply_change_group ())
5566		  {
5567		    /* Swap them back to be invalid so that this loop can
5568		       continue and flag them to be swapped back later.  */
5569		    rtx tem;
5570
5571		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
5572				       XEXP (x, 1) = tem;
5573		    must_swap = 1;
5574		    break;
5575		  }
5576	      }
5577	  }
5578      }
5579
5580    else
5581      {
5582	if (fmt[i] == 'E')
5583	  /* Don't try to fold inside of a vector of expressions.
5584	     Doing nothing is harmless.  */
5585	  {;}
5586      }
5587
5588  /* If a commutative operation, place a constant integer as the second
5589     operand unless the first operand is also a constant integer.  Otherwise,
5590     place any constant second unless the first operand is also a constant.  */
5591
5592  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
5593    {
5594      if (must_swap || (const_arg0
5595	  		&& (const_arg1 == 0
5596	      		    || (GET_CODE (const_arg0) == CONST_INT
5597			        && GET_CODE (const_arg1) != CONST_INT))))
5598	{
5599	  register rtx tem = XEXP (x, 0);
5600
5601	  if (insn == 0 && ! copied)
5602	    {
5603	      x = copy_rtx (x);
5604	      copied = 1;
5605	    }
5606
5607	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
5608	  validate_change (insn, &XEXP (x, 1), tem, 1);
5609	  if (apply_change_group ())
5610	    {
5611	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
5612	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
5613	    }
5614	}
5615    }
5616
5617  /* If X is an arithmetic operation, see if we can simplify it.  */
5618
5619  switch (GET_RTX_CLASS (code))
5620    {
5621    case '1':
5622      {
5623	int is_const = 0;
5624
5625	/* We can't simplify extension ops unless we know the
5626	   original mode.  */
5627	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
5628	    && mode_arg0 == VOIDmode)
5629	  break;
5630
5631	/* If we had a CONST, strip it off and put it back later if we
5632	   fold.  */
5633	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
5634	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
5635
5636	new = simplify_unary_operation (code, mode,
5637					const_arg0 ? const_arg0 : folded_arg0,
5638					mode_arg0);
5639	if (new != 0 && is_const)
5640	  new = gen_rtx_CONST (mode, new);
5641      }
5642      break;
5643
5644    case '<':
5645      /* See what items are actually being compared and set FOLDED_ARG[01]
5646	 to those values and CODE to the actual comparison code.  If any are
5647	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
5648	 do anything if both operands are already known to be constant.  */
5649
5650      if (const_arg0 == 0 || const_arg1 == 0)
5651	{
5652	  struct table_elt *p0, *p1;
5653	  rtx true = const_true_rtx, false = const0_rtx;
5654	  enum machine_mode mode_arg1;
5655
5656#ifdef FLOAT_STORE_FLAG_VALUE
5657	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5658	    {
5659	      true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5660						   mode);
5661	      false = CONST0_RTX (mode);
5662	    }
5663#endif
5664
5665	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
5666				       &mode_arg0, &mode_arg1);
5667	  const_arg0 = equiv_constant (folded_arg0);
5668	  const_arg1 = equiv_constant (folded_arg1);
5669
5670	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
5671	     what kinds of things are being compared, so we can't do
5672	     anything with this comparison.  */
5673
5674	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
5675	    break;
5676
5677	  /* If we do not now have two constants being compared, see
5678	     if we can nevertheless deduce some things about the
5679	     comparison.  */
5680	  if (const_arg0 == 0 || const_arg1 == 0)
5681	    {
5682	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or
5683		 non-explicit constant?  These aren't zero, but we
5684		 don't know their sign.  */
5685	      if (const_arg1 == const0_rtx
5686		  && (NONZERO_BASE_PLUS_P (folded_arg0)
5687#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
5688	  come out as 0.  */
5689		      || GET_CODE (folded_arg0) == SYMBOL_REF
5690#endif
5691		      || GET_CODE (folded_arg0) == LABEL_REF
5692		      || GET_CODE (folded_arg0) == CONST))
5693		{
5694		  if (code == EQ)
5695		    return false;
5696		  else if (code == NE)
5697		    return true;
5698		}
5699
5700	      /* See if the two operands are the same.  We don't do this
5701		 for IEEE floating-point, since we can't assume x == x;
5702		 x might be a NaN.  */
5703
5704	      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5705		   || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
5706		  && (folded_arg0 == folded_arg1
5707		      || (GET_CODE (folded_arg0) == REG
5708			  && GET_CODE (folded_arg1) == REG
5709			  && (REG_QTY (REGNO (folded_arg0))
5710			      == REG_QTY (REGNO (folded_arg1))))
5711		      || ((p0 = lookup (folded_arg0,
5712					(safe_hash (folded_arg0, mode_arg0)
5713					 % NBUCKETS), mode_arg0))
5714			  && (p1 = lookup (folded_arg1,
5715					   (safe_hash (folded_arg1, mode_arg0)
5716					    % NBUCKETS), mode_arg0))
5717			  && p0->first_same_value == p1->first_same_value)))
5718		return ((code == EQ || code == LE || code == GE
5719			 || code == LEU || code == GEU)
5720			? true : false);
5721
5722	      /* If FOLDED_ARG0 is a register, see if the comparison we are
5723		 doing now is either the same as we did before or the reverse
5724		 (we only check the reverse if not floating-point).  */
5725	      else if (GET_CODE (folded_arg0) == REG)
5726		{
5727		  int qty = REG_QTY (REGNO (folded_arg0));
5728
5729		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0))
5730		      && (comparison_dominates_p (qty_comparison_code[qty], code)
5731			  || (comparison_dominates_p (qty_comparison_code[qty],
5732						      reverse_condition (code))
5733			      && ! FLOAT_MODE_P (mode_arg0)))
5734		      && (rtx_equal_p (qty_comparison_const[qty], folded_arg1)
5735			  || (const_arg1
5736			      && rtx_equal_p (qty_comparison_const[qty],
5737					      const_arg1))
5738			  || (GET_CODE (folded_arg1) == REG
5739			      && (REG_QTY (REGNO (folded_arg1))
5740				  == qty_comparison_qty[qty]))))
5741		    return (comparison_dominates_p (qty_comparison_code[qty],
5742						    code)
5743			    ? true : false);
5744		}
5745	    }
5746	}
5747
5748      /* If we are comparing against zero, see if the first operand is
5749	 equivalent to an IOR with a constant.  If so, we may be able to
5750	 determine the result of this comparison.  */
5751
5752      if (const_arg1 == const0_rtx)
5753	{
5754	  rtx y = lookup_as_function (folded_arg0, IOR);
5755	  rtx inner_const;
5756
5757	  if (y != 0
5758	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
5759	      && GET_CODE (inner_const) == CONST_INT
5760	      && INTVAL (inner_const) != 0)
5761	    {
5762	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
5763	      int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
5764			      && (INTVAL (inner_const)
5765				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
5766	      rtx true = const_true_rtx, false = const0_rtx;
5767
5768#ifdef FLOAT_STORE_FLAG_VALUE
5769	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
5770		{
5771		  true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
5772						       mode);
5773		  false = CONST0_RTX (mode);
5774		}
5775#endif
5776
5777	      switch (code)
5778		{
5779		case EQ:
5780		  return false;
5781		case NE:
5782		  return true;
5783		case LT:  case LE:
5784		  if (has_sign)
5785		    return true;
5786		  break;
5787		case GT:  case GE:
5788		  if (has_sign)
5789		    return false;
5790		  break;
5791		default:
5792		  break;
5793		}
5794	    }
5795	}
5796
5797      new = simplify_relational_operation (code, mode_arg0,
5798					   const_arg0 ? const_arg0 : folded_arg0,
5799					   const_arg1 ? const_arg1 : folded_arg1);
5800#ifdef FLOAT_STORE_FLAG_VALUE
5801      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
5802	new = ((new == const0_rtx) ? CONST0_RTX (mode)
5803	       : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
5804#endif
5805      break;
5806
5807    case '2':
5808    case 'c':
5809      switch (code)
5810	{
5811	case PLUS:
5812	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
5813	     with that LABEL_REF as its second operand.  If so, the result is
5814	     the first operand of that MINUS.  This handles switches with an
5815	     ADDR_DIFF_VEC table.  */
5816	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
5817	    {
5818	      rtx y
5819		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
5820		  : lookup_as_function (folded_arg0, MINUS);
5821
5822	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5823		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
5824		return XEXP (y, 0);
5825
5826	      /* Now try for a CONST of a MINUS like the above.  */
5827	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
5828			: lookup_as_function (folded_arg0, CONST))) != 0
5829		  && GET_CODE (XEXP (y, 0)) == MINUS
5830		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5831		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
5832		return XEXP (XEXP (y, 0), 0);
5833	    }
5834
5835	  /* Likewise if the operands are in the other order.  */
5836	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
5837	    {
5838	      rtx y
5839		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
5840		  : lookup_as_function (folded_arg1, MINUS);
5841
5842	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
5843		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
5844		return XEXP (y, 0);
5845
5846	      /* Now try for a CONST of a MINUS like the above.  */
5847	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
5848			: lookup_as_function (folded_arg1, CONST))) != 0
5849		  && GET_CODE (XEXP (y, 0)) == MINUS
5850		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
5851		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
5852		return XEXP (XEXP (y, 0), 0);
5853	    }
5854
5855	  /* If second operand is a register equivalent to a negative
5856	     CONST_INT, see if we can find a register equivalent to the
5857	     positive constant.  Make a MINUS if so.  Don't do this for
5858	     a non-negative constant since we might then alternate between
5859	     choosing positive and negative constants.  Having the positive
5860	     constant previously-used is the more common case.  Be sure
5861	     the resulting constant is non-negative; if const_arg1 were
5862	     the smallest negative number this would overflow: depending
5863	     on the mode, this would either just be the same value (and
5864	     hence not save anything) or be incorrect.  */
5865	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
5866	      && INTVAL (const_arg1) < 0
5867	      /* This used to test
5868
5869	         - INTVAL (const_arg1) >= 0
5870
5871		 But the Sun V5.0 compilers mis-compiled that test.  So
5872		 instead we test for the problematic value in a more direct
5873		 manner and hope the Sun compilers get it correct.  */
5874	      && INTVAL (const_arg1) !=
5875	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
5876	      && GET_CODE (folded_arg1) == REG)
5877	    {
5878	      rtx new_const = GEN_INT (- INTVAL (const_arg1));
5879	      struct table_elt *p
5880		= lookup (new_const, safe_hash (new_const, mode) % NBUCKETS,
5881			  mode);
5882
5883	      if (p)
5884		for (p = p->first_same_value; p; p = p->next_same_value)
5885		  if (GET_CODE (p->exp) == REG)
5886		    return cse_gen_binary (MINUS, mode, folded_arg0,
5887					   canon_reg (p->exp, NULL_RTX));
5888	    }
5889	  goto from_plus;
5890
5891	case MINUS:
5892	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
5893	     If so, produce (PLUS Z C2-C).  */
5894	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
5895	    {
5896	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
5897	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
5898		return fold_rtx (plus_constant (copy_rtx (y),
5899						-INTVAL (const_arg1)),
5900				 NULL_RTX);
5901	    }
5902
5903	  /* ... fall through ...  */
5904
5905	from_plus:
5906	case SMIN:    case SMAX:      case UMIN:    case UMAX:
5907	case IOR:     case AND:       case XOR:
5908	case MULT:    case DIV:       case UDIV:
5909	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
5910	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
5911	     is known to be of similar form, we may be able to replace the
5912	     operation with a combined operation.  This may eliminate the
5913	     intermediate operation if every use is simplified in this way.
5914	     Note that the similar optimization done by combine.c only works
5915	     if the intermediate operation's result has only one reference.  */
5916
5917	  if (GET_CODE (folded_arg0) == REG
5918	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
5919	    {
5920	      int is_shift
5921		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
5922	      rtx y = lookup_as_function (folded_arg0, code);
5923	      rtx inner_const;
5924	      enum rtx_code associate_code;
5925	      rtx new_const;
5926
5927	      if (y == 0
5928		  || 0 == (inner_const
5929			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
5930		  || GET_CODE (inner_const) != CONST_INT
5931		  /* If we have compiled a statement like
5932		     "if (x == (x & mask1))", and now are looking at
5933		     "x & mask2", we will have a case where the first operand
5934		     of Y is the same as our first operand.  Unless we detect
5935		     this case, an infinite loop will result.  */
5936		  || XEXP (y, 0) == folded_arg0)
5937		break;
5938
5939	      /* Don't associate these operations if they are a PLUS with the
5940		 same constant and it is a power of two.  These might be doable
5941		 with a pre- or post-increment.  Similarly for two subtracts of
5942		 identical powers of two with post decrement.  */
5943
5944	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
5945		  && ((HAVE_PRE_INCREMENT
5946			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5947		      || (HAVE_POST_INCREMENT
5948			  && exact_log2 (INTVAL (const_arg1)) >= 0)
5949		      || (HAVE_PRE_DECREMENT
5950			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
5951		      || (HAVE_POST_DECREMENT
5952			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
5953		break;
5954
5955	      /* Compute the code used to compose the constants.  For example,
5956		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
5957
5958	      associate_code
5959		= (code == MULT || code == DIV || code == UDIV ? MULT
5960		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
5961
5962	      new_const = simplify_binary_operation (associate_code, mode,
5963						     const_arg1, inner_const);
5964
5965	      if (new_const == 0)
5966		break;
5967
5968	      /* If we are associating shift operations, don't let this
5969		 produce a shift of the size of the object or larger.
5970		 This could occur when we follow a sign-extend by a right
5971		 shift on a machine that does a sign-extend as a pair
5972		 of shifts.  */
5973
5974	      if (is_shift && GET_CODE (new_const) == CONST_INT
5975		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
5976		{
5977		  /* As an exception, we can turn an ASHIFTRT of this
5978		     form into a shift of the number of bits - 1.  */
5979		  if (code == ASHIFTRT)
5980		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
5981		  else
5982		    break;
5983		}
5984
5985	      y = copy_rtx (XEXP (y, 0));
5986
5987	      /* If Y contains our first operand (the most common way this
5988		 can happen is if Y is a MEM), we would go into an infinite
5989		 loop if we tried to fold it.  So don't in that case.  */
5990
5991	      if (! reg_mentioned_p (folded_arg0, y))
5992		y = fold_rtx (y, insn);
5993
5994	      return cse_gen_binary (code, mode, y, new_const);
5995	    }
5996	  break;
5997
5998	default:
5999	  break;
6000	}
6001
6002      new = simplify_binary_operation (code, mode,
6003				       const_arg0 ? const_arg0 : folded_arg0,
6004				       const_arg1 ? const_arg1 : folded_arg1);
6005      break;
6006
6007    case 'o':
6008      /* (lo_sum (high X) X) is simply X.  */
6009      if (code == LO_SUM && const_arg0 != 0
6010	  && GET_CODE (const_arg0) == HIGH
6011	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
6012	return const_arg1;
6013      break;
6014
6015    case '3':
6016    case 'b':
6017      new = simplify_ternary_operation (code, mode, mode_arg0,
6018					const_arg0 ? const_arg0 : folded_arg0,
6019					const_arg1 ? const_arg1 : folded_arg1,
6020					const_arg2 ? const_arg2 : XEXP (x, 2));
6021      break;
6022
6023    case 'x':
6024      /* Always eliminate CONSTANT_P_RTX at this stage.  */
6025      if (code == CONSTANT_P_RTX)
6026	return (const_arg0 ? const1_rtx : const0_rtx);
6027      break;
6028    }
6029
6030  return new ? new : x;
6031}
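
/* Illustrative sketch, not part of the compiler: the shift-count
   subtlety handled in the SUBREG case of fold_rtx above, written as
   ordinary C and guarded by `#if 0'.  */
#if 0
#include <stdint.h>
#include <stdio.h>

int
main (void)
{
  uint64_t wide = 0x12345678u;

  /* In the wide (DImode-like) type the shift is well defined, and the
     low 32 bits of the result are exactly zero -- which is why fold_rtx
     can return const0_rtx for the low SImode part without doing any
     narrow arithmetic.  */
  uint32_t low = (uint32_t) (wide << 32);
  printf ("low part = %u\n", (unsigned) low);	/* prints 0 */

  /* Attempting the "same" shift in the narrow type, `(uint32_t) wide
     << 32', is undefined behavior in C (count >= width); on e.g. x86
     the hardware masks the count to 5 bits, so it would often give
     back the original value instead of 0.  That is the
     unpredictability the comment in fold_rtx warns about.  */
  return 0;
}
#endif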
6032
6033/* Return a constant value currently equivalent to X.
6034   Return 0 if we don't know one.  */
6035
6036static rtx
6037equiv_constant (x)
6038     rtx x;
6039{
6040  if (GET_CODE (x) == REG
6041      && REGNO_QTY_VALID_P (REGNO (x))
6042      && qty_const[REG_QTY (REGNO (x))])
6043    x = gen_lowpart_if_possible (GET_MODE (x), qty_const[REG_QTY (REGNO (x))]);
6044
6045  if (x == 0 || CONSTANT_P (x))
6046    return x;
6047
6048  /* If X is a MEM, try to fold it outside the context of any insn to see if
6049     it might be equivalent to a constant.  That handles the case where it
6050     is a constant-pool reference.  Then try to look it up in the hash table
6051     in case it is something whose value we have seen before.  */
6052
6053  if (GET_CODE (x) == MEM)
6054    {
6055      struct table_elt *elt;
6056
6057      x = fold_rtx (x, NULL_RTX);
6058      if (CONSTANT_P (x))
6059	return x;
6060
6061      elt = lookup (x, safe_hash (x, GET_MODE (x)) % NBUCKETS, GET_MODE (x));
6062      if (elt == 0)
6063	return 0;
6064
6065      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
6066	if (elt->is_const && CONSTANT_P (elt->exp))
6067	  return elt->exp;
6068    }
6069
6070  return 0;
6071}
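
/* Illustrative sketch, not part of the compiler: the REG case of
   equiv_constant reduced to its essence -- a per-register table of
   known constant values, with a null entry meaning "no equivalence
   recorded".  The table and names here are invented for the
   demonstration; the real lookup goes through REG_QTY and qty_const
   and also narrows the constant to the register's mode.  */
#if 0
#include <assert.h>
#include <stddef.h>

#define N_DEMO_REGS 8
static const long *demo_known_const[N_DEMO_REGS];

static const long *
demo_equiv_constant (int regno)
{
  if (regno < 0 || regno >= N_DEMO_REGS)
    return NULL;
  return demo_known_const[regno];	/* NULL <=> unknown */
}

int
main (void)
{
  static const long forty_two = 42;

  demo_known_const[3] = &forty_two;	/* pretend reg 3 holds 42 */
  assert (demo_equiv_constant (3) && *demo_equiv_constant (3) == 42);
  assert (demo_equiv_constant (4) == NULL);
  return 0;
}
#endif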
6072
6073/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
6074   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
6075   least-significant part of X.
6076   MODE specifies how big a part of X to return.
6077
6078   If the requested operation cannot be done, 0 is returned.
6079
6080   This is similar to gen_lowpart in emit-rtl.c.  */
6081
6082rtx
6083gen_lowpart_if_possible (mode, x)
6084     enum machine_mode mode;
6085     register rtx x;
6086{
6087  rtx result = gen_lowpart_common (mode, x);
6088
6089  if (result)
6090    return result;
6091  else if (GET_CODE (x) == MEM)
6092    {
6093      /* This is the only other case we handle.  */
6094      register int offset = 0;
6095      rtx new;
6096
6097      if (WORDS_BIG_ENDIAN)
6098	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6099		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6100      if (BYTES_BIG_ENDIAN)
6101	/* Adjust the address so that the address-after-the-data is
6102	   unchanged.  */
6103	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6104		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6105      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
6106      if (! memory_address_p (mode, XEXP (new, 0)))
6107	return 0;
6108      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6109      MEM_COPY_ATTRIBUTES (new, x);
6110      return new;
6111    }
6112  else
6113    return 0;
6114}
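
/* Illustrative sketch, not part of the compiler: the byte-offset
   arithmetic in the MEM case above, assuming a 4-byte word
   (UNITS_PER_WORD == 4) purely for the demonstration.  INNER and
   OUTER are the sizes in bytes of the value in memory and of the low
   part wanted; the helper name is invented.  */
#if 0
#include <stdio.h>

#define DEMO_MAX(a, b) ((a) > (b) ? (a) : (b))
#define DEMO_MIN(a, b) ((a) < (b) ? (a) : (b))

static int
demo_lowpart_offset (int inner, int outer, int words_big, int bytes_big)
{
  int offset = 0;

  if (words_big)
    offset = DEMO_MAX (inner, 4) - DEMO_MAX (outer, 4);
  if (bytes_big)
    /* Keep the address-after-the-data unchanged within a word.  */
    offset -= DEMO_MIN (4, outer) - DEMO_MIN (4, inner);
  return offset;
}

int
main (void)
{
  /* Low SImode part of a DImode value: byte 0 on a little-endian
     target, byte 4 on a big-endian one.  */
  printf ("SI of DI, little endian: %d\n", demo_lowpart_offset (8, 4, 0, 0));
  printf ("SI of DI, big endian:    %d\n", demo_lowpart_offset (8, 4, 1, 1));

  /* Low HImode part of an SImode value on a big-endian target sits
     at byte 2.  */
  printf ("HI of SI, big endian:    %d\n", demo_lowpart_offset (4, 2, 1, 1));
  return 0;
}
#endif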
6115
6116/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
6117   branch.  It will be zero if not.
6118
6119   In certain cases, this can cause us to add an equivalence.  For example,
6120   if we are following the taken case of
6121   	if (i == 2)
6122   we can add the fact that `i' and `2' are now equivalent.
6123
6124   In any case, we can record that this comparison was passed.  If the same
6125   comparison is seen later, we will know its value.  */
6126
6127static void
6128record_jump_equiv (insn, taken)
6129     rtx insn;
6130     int taken;
6131{
6132  int cond_known_true;
6133  rtx op0, op1;
6134  enum machine_mode mode, mode0, mode1;
6135  int reversed_nonequality = 0;
6136  enum rtx_code code;
6137
6138  /* Ensure this is the right kind of insn.  */
6139  if (! condjump_p (insn) || simplejump_p (insn))
6140    return;
6141
6142  /* See if this jump condition is known true or false.  */
6143  if (taken)
6144    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
6145  else
6146    cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
6147
6148  /* Get the type of comparison being done and the operands being compared.
6149     If we had to reverse a non-equality condition, record that fact so we
6150     know that it isn't valid for floating-point.  */
6151  code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
6152  op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
6153  op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
6154
6155  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
6156  if (! cond_known_true)
6157    {
6158      reversed_nonequality = (code != EQ && code != NE);
6159      code = reverse_condition (code);
6160    }
6161
6162  /* The mode is the mode of the non-constant.  */
6163  mode = mode0;
6164  if (mode1 != VOIDmode)
6165    mode = mode1;
6166
6167  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
6168}
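
/* Illustrative sketch, not part of the compiler: the reversal applied
   above when the branch is not taken.  The enum and function are
   invented stand-ins for the rtx comparison codes and
   reverse_condition; note that for floating-point, turning LT into GE
   is not valid in the presence of NaNs, which is exactly what the
   reversed_nonequality flag guards against.  */
#if 0
enum demo_cond { DEMO_EQ, DEMO_NE, DEMO_LT, DEMO_GE, DEMO_GT, DEMO_LE };

static enum demo_cond
demo_reverse_condition (enum demo_cond c)
{
  switch (c)
    {
    case DEMO_EQ: return DEMO_NE;
    case DEMO_NE: return DEMO_EQ;
    case DEMO_LT: return DEMO_GE;
    case DEMO_GE: return DEMO_LT;
    case DEMO_GT: return DEMO_LE;
    case DEMO_LE: return DEMO_GT;
    }
  return c;
}
#endif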
6169
6170/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
6171   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
6172   Make any useful entries we can with that information.  Called from
6173   above function and called recursively.  */
6174
6175static void
6176record_jump_cond (code, mode, op0, op1, reversed_nonequality)
6177     enum rtx_code code;
6178     enum machine_mode mode;
6179     rtx op0, op1;
6180     int reversed_nonequality;
6181{
6182  unsigned op0_hash, op1_hash;
6183  int op0_in_memory, op0_in_struct, op1_in_memory, op1_in_struct;
6184  struct table_elt *op0_elt, *op1_elt;
6185
6186  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
6187     we know that they are also equal in the smaller mode (this is also
6188     true for all smaller modes whether or not there is a SUBREG, but
6189     is not worth testing for with no SUBREG).  */
6190
6191  /* Note that GET_MODE (op0) may not equal MODE.  */
6192  if (code == EQ && GET_CODE (op0) == SUBREG
6193      && (GET_MODE_SIZE (GET_MODE (op0))
6194	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6195    {
6196      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6197      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6198
6199      record_jump_cond (code, mode, SUBREG_REG (op0),
6200			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6201			reversed_nonequality);
6202    }
6203
6204  if (code == EQ && GET_CODE (op1) == SUBREG
6205      && (GET_MODE_SIZE (GET_MODE (op1))
6206	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6207    {
6208      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6209      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6210
6211      record_jump_cond (code, mode, SUBREG_REG (op1),
6212			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6213			reversed_nonequality);
6214    }
6215
6216  /* Similarly, if this is an NE comparison, and either is a SUBREG
6217     making a smaller mode, we know the whole thing is also NE.  */
6218
6219  /* Note that GET_MODE (op0) may not equal MODE;
6220     if we test MODE instead, we can get an infinite recursion
6221     alternating between two modes each wider than MODE.  */
6222
6223  if (code == NE && GET_CODE (op0) == SUBREG
6224      && subreg_lowpart_p (op0)
6225      && (GET_MODE_SIZE (GET_MODE (op0))
6226	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
6227    {
6228      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
6229      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
6230
6231      record_jump_cond (code, mode, SUBREG_REG (op0),
6232			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
6233			reversed_nonequality);
6234    }
6235
6236  if (code == NE && GET_CODE (op1) == SUBREG
6237      && subreg_lowpart_p (op1)
6238      && (GET_MODE_SIZE (GET_MODE (op1))
6239	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
6240    {
6241      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
6242      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
6243
6244      record_jump_cond (code, mode, SUBREG_REG (op1),
6245			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
6246			reversed_nonequality);
6247    }
6248
6249  /* Hash both operands.  */
6250
6251  do_not_record = 0;
6252  hash_arg_in_memory = 0;
6253  hash_arg_in_struct = 0;
6254  op0_hash = HASH (op0, mode);
6255  op0_in_memory = hash_arg_in_memory;
6256  op0_in_struct = hash_arg_in_struct;
6257
6258  if (do_not_record)
6259    return;
6260
6261  do_not_record = 0;
6262  hash_arg_in_memory = 0;
6263  hash_arg_in_struct = 0;
6264  op1_hash = HASH (op1, mode);
6265  op1_in_memory = hash_arg_in_memory;
6266  op1_in_struct = hash_arg_in_struct;
6267
6268  if (do_not_record)
6269    return;
6270
6271  /* Look up both operands.  */
6272  op0_elt = lookup (op0, op0_hash, mode);
6273  op1_elt = lookup (op1, op1_hash, mode);
6274
6275  /* If both operands are already equivalent or if they are not in the
6276     table but are identical, do nothing.  */
6277  if ((op0_elt != 0 && op1_elt != 0
6278       && op0_elt->first_same_value == op1_elt->first_same_value)
6279      || op0 == op1 || rtx_equal_p (op0, op1))
6280    return;
6281
6282  /* If we aren't setting two things equal, all we can do is save this
6283     comparison.  Similarly if this is floating-point.  In the latter
6284     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
6285     If we record the equality, we might inadvertently delete code
6286     whose intent was to change -0 to +0.  */
6287
6288  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
6289    {
6290      /* If we reversed a floating-point comparison, if OP0 is not a
6291	 register, or if OP1 is neither a register nor a constant, we can't
6292	 do anything.  */
6293
6294      if (GET_CODE (op1) != REG)
6295	op1 = equiv_constant (op1);
6296
6297      if ((reversed_nonequality && FLOAT_MODE_P (mode))
6298	  || GET_CODE (op0) != REG || op1 == 0)
6299	return;
6300
6301      /* Put OP0 in the hash table if it isn't already.  This gives it a
6302	 new quantity number.  */
6303      if (op0_elt == 0)
6304	{
6305	  if (insert_regs (op0, NULL_PTR, 0))
6306	    {
6307	      rehash_using_reg (op0);
6308	      op0_hash = HASH (op0, mode);
6309
6310	      /* If OP0 is contained in OP1, this changes its hash code
6311		 as well.  Faster to rehash than to check, except
6312		 for the simple case of a constant.  */
6313	      if (! CONSTANT_P (op1))
6314		op1_hash = HASH (op1, mode);
6315	    }
6316
6317	  op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6318	  op0_elt->in_memory = op0_in_memory;
6319	  op0_elt->in_struct = op0_in_struct;
6320	}
6321
6322      qty_comparison_code[REG_QTY (REGNO (op0))] = code;
6323      if (GET_CODE (op1) == REG)
6324	{
6325	  /* Look it up again--in case op0 and op1 are the same.  */
6326	  op1_elt = lookup (op1, op1_hash, mode);
6327
6328	  /* Put OP1 in the hash table so it gets a new quantity number.  */
6329	  if (op1_elt == 0)
6330	    {
6331	      if (insert_regs (op1, NULL_PTR, 0))
6332		{
6333		  rehash_using_reg (op1);
6334		  op1_hash = HASH (op1, mode);
6335		}
6336
6337	      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6338	      op1_elt->in_memory = op1_in_memory;
6339	      op1_elt->in_struct = op1_in_struct;
6340	    }
6341
6342	  qty_comparison_qty[REG_QTY (REGNO (op0))] = REG_QTY (REGNO (op1));
6343	  qty_comparison_const[REG_QTY (REGNO (op0))] = 0;
6344	}
6345      else
6346	{
6347	  qty_comparison_qty[REG_QTY (REGNO (op0))] = -1;
6348	  qty_comparison_const[REG_QTY (REGNO (op0))] = op1;
6349	}
6350
6351      return;
6352    }
6353
6354  /* If either side is still missing an equivalence, make it now,
6355     then merge the equivalences.  */
6356
6357  if (op0_elt == 0)
6358    {
6359      if (insert_regs (op0, NULL_PTR, 0))
6360	{
6361	  rehash_using_reg (op0);
6362	  op0_hash = HASH (op0, mode);
6363	}
6364
6365      op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
6366      op0_elt->in_memory = op0_in_memory;
6367      op0_elt->in_struct = op0_in_struct;
6368    }
6369
6370  if (op1_elt == 0)
6371    {
6372      if (insert_regs (op1, NULL_PTR, 0))
6373	{
6374	  rehash_using_reg (op1);
6375	  op1_hash = HASH (op1, mode);
6376	}
6377
6378      op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
6379      op1_elt->in_memory = op1_in_memory;
6380      op1_elt->in_struct = op1_in_struct;
6381    }
6382
6383  merge_equiv_classes (op0_elt, op1_elt);
6384  last_jump_equiv_class = op0_elt;
6385}
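
/* Illustrative sketch, not part of the compiler: the two mode facts
   the SUBREG cases above rely on, checked on ordinary fixed-width
   integers and guarded by `#if 0'.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0x00011234u, b = 0x00021234u;

  /* EQ propagates downward: equal wide values have equal low parts,
     so an equality recorded through a paradoxical SUBREG also holds
     in the narrower mode.  */
  uint32_t c = a;
  assert ((uint16_t) c == (uint16_t) a);

  /* NE propagates upward: if the low parts differ, the wide values
     differ too.  The converse is false, as A and B show: different
     words with identical low halves.  */
  assert (a != b && (uint16_t) a == (uint16_t) b);
  return 0;
}
#endif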
6386
6387/* CSE processing for one instruction.
6388   First simplify sources and addresses of all assignments
6389   in the instruction, using previously-computed equivalent values.
6390   Then install the new sources and destinations in the table
6391   of available values.
6392
6393   If LIBCALL_INSN is nonzero, don't record any equivalence made in
6394   the insn.  It means that INSN is inside a libcall block.  In this
6395   case LIBCALL_INSN is the corresponding insn with the REG_LIBCALL note. */
6396
6397/* Data on one SET contained in the instruction.  */
6398
6399struct set
6400{
6401  /* The SET rtx itself.  */
6402  rtx rtl;
6403  /* The SET_SRC of the rtx (the original value, if it is changing).  */
6404  rtx src;
6405  /* The hash-table element for the SET_SRC of the SET.  */
6406  struct table_elt *src_elt;
6407  /* Hash value for the SET_SRC.  */
6408  unsigned src_hash;
6409  /* Hash value for the SET_DEST.  */
6410  unsigned dest_hash;
6411  /* The SET_DEST, with SUBREG, etc., stripped.  */
6412  rtx inner_dest;
6413  /* Place where the pointer to the INNER_DEST was found.  */
6414  rtx *inner_dest_loc;
6415  /* Nonzero if the SET_SRC is in memory.  */
6416  char src_in_memory;
6417  /* Nonzero if the SET_SRC is in a structure.  */
6418  char src_in_struct;
6419  /* Nonzero if the SET_SRC contains something
6420     whose value cannot be predicted and understood.  */
6421  char src_volatile;
6422  /* Original machine mode, in case it becomes a CONST_INT.  */
6423  enum machine_mode mode;
6424  /* A constant equivalent for SET_SRC, if any.  */
6425  rtx src_const;
6426  /* Hash value of constant equivalent for SET_SRC.  */
6427  unsigned src_const_hash;
6428  /* Table entry for constant equivalent for SET_SRC, if any.  */
6429  struct table_elt *src_const_elt;
6430};
6431
6432static void
6433cse_insn (insn, libcall_insn)
6434     rtx insn;
6435     rtx libcall_insn;
6436{
6437  register rtx x = PATTERN (insn);
6438  register int i;
6439  rtx tem;
6440  register int n_sets = 0;
6441
6442#ifdef HAVE_cc0
6443  /* Records what this insn does to set CC0.  */
6444  rtx this_insn_cc0 = 0;
6445  enum machine_mode this_insn_cc0_mode = VOIDmode;
6446#endif
6447
6448  rtx src_eqv = 0;
6449  struct table_elt *src_eqv_elt = 0;
6450  int src_eqv_volatile;
6451  int src_eqv_in_memory;
6452  int src_eqv_in_struct;
6453  unsigned src_eqv_hash;
6454
6455  struct set *sets;
6456
6457  this_insn = insn;
6458
6459  /* Find all the SETs and CLOBBERs in this instruction.
6460     Record all the SETs in the array `sets' and count them.
6461     Also determine whether there is a CLOBBER that invalidates
6462     all memory references, or all references at varying addresses.  */
6463
6464  if (GET_CODE (insn) == CALL_INSN)
6465    {
6466      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6467	if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
6468          invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
6469    }
6470
6471  if (GET_CODE (x) == SET)
6472    {
6473      sets = (struct set *) alloca (sizeof (struct set));
6474      sets[0].rtl = x;
6475
6476      /* Ignore SETs that are unconditional jumps.
6477	 They never need cse processing, so this does not hurt.
6478	 The reason is not efficiency but rather
6479	 so that we can test at the end for instructions
6480	 that have been simplified to unconditional jumps
6481	 and not be misled by unchanged instructions
6482	 that were unconditional jumps to begin with.  */
6483      if (SET_DEST (x) == pc_rtx
6484	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
6485	;
6486
6487      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
6488	 The hard function value register is used only once, to copy to
6489	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
6490	 Ensure we invalidate the destination register.  On the 80386 no
6491	 other code would invalidate it since it is a fixed_reg.
6492	 We need not check the return of apply_change_group; see canon_reg.  */
6493
6494      else if (GET_CODE (SET_SRC (x)) == CALL)
6495	{
6496	  canon_reg (SET_SRC (x), insn);
6497	  apply_change_group ();
6498	  fold_rtx (SET_SRC (x), insn);
6499	  invalidate (SET_DEST (x), VOIDmode);
6500	}
6501      else
6502	n_sets = 1;
6503    }
6504  else if (GET_CODE (x) == PARALLEL)
6505    {
6506      register int lim = XVECLEN (x, 0);
6507
6508      sets = (struct set *) alloca (lim * sizeof (struct set));
6509
6510      /* Find all regs explicitly clobbered in this insn,
6511	 and ensure they are not replaced with any other regs
6512	 elsewhere in this insn.
6513	 When a reg that is clobbered is also used for input,
6514	 we should presume that that is for a reason,
6515	 and we should not substitute some other register
6516	 which is not supposed to be clobbered.
6517	 Therefore, this loop cannot be merged into the one below
6518	 because a CALL may precede a CLOBBER and refer to the
6519	 value clobbered.  We must not let a canonicalization do
6520	 anything in that case.  */
6521      for (i = 0; i < lim; i++)
6522	{
6523	  register rtx y = XVECEXP (x, 0, i);
6524	  if (GET_CODE (y) == CLOBBER)
6525	    {
6526	      rtx clobbered = XEXP (y, 0);
6527
6528	      if (GET_CODE (clobbered) == REG
6529		  || GET_CODE (clobbered) == SUBREG)
6530		invalidate (clobbered, VOIDmode);
6531	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
6532		       || GET_CODE (clobbered) == ZERO_EXTRACT)
6533		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
6534	    }
6535	}
6536
6537      for (i = 0; i < lim; i++)
6538	{
6539	  register rtx y = XVECEXP (x, 0, i);
6540	  if (GET_CODE (y) == SET)
6541	    {
6542	      /* As above, we ignore unconditional jumps and call-insns and
6543		 ignore the result of apply_change_group.  */
6544	      if (GET_CODE (SET_SRC (y)) == CALL)
6545		{
6546		  canon_reg (SET_SRC (y), insn);
6547		  apply_change_group ();
6548		  fold_rtx (SET_SRC (y), insn);
6549		  invalidate (SET_DEST (y), VOIDmode);
6550		}
6551	      else if (SET_DEST (y) == pc_rtx
6552		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
6553		;
6554	      else
6555		sets[n_sets++].rtl = y;
6556	    }
6557	  else if (GET_CODE (y) == CLOBBER)
6558	    {
6559	      /* If we clobber memory, canon the address.
6560		 This does nothing when a register is clobbered
6561		 because we have already invalidated the reg.  */
6562	      if (GET_CODE (XEXP (y, 0)) == MEM)
6563		canon_reg (XEXP (y, 0), NULL_RTX);
6564	    }
6565	  else if (GET_CODE (y) == USE
6566		   && ! (GET_CODE (XEXP (y, 0)) == REG
6567			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
6568	    canon_reg (y, NULL_RTX);
6569	  else if (GET_CODE (y) == CALL)
6570	    {
6571	      /* The result of apply_change_group can be ignored; see
6572		 canon_reg.  */
6573	      canon_reg (y, insn);
6574	      apply_change_group ();
6575	      fold_rtx (y, insn);
6576	    }
6577	}
6578    }
6579  else if (GET_CODE (x) == CLOBBER)
6580    {
6581      if (GET_CODE (XEXP (x, 0)) == MEM)
6582	canon_reg (XEXP (x, 0), NULL_RTX);
6583    }
6584
6585  /* Canonicalize a USE of a pseudo register or memory location.  */
6586  else if (GET_CODE (x) == USE
6587	   && ! (GET_CODE (XEXP (x, 0)) == REG
6588		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
6589    canon_reg (XEXP (x, 0), NULL_RTX);
6590  else if (GET_CODE (x) == CALL)
6591    {
6592      /* The result of apply_change_group can be ignored; see canon_reg.  */
6593      canon_reg (x, insn);
6594      apply_change_group ();
6595      fold_rtx (x, insn);
6596    }
6597
6598  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
6599     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
6600     is handled specially for this case, and if it isn't set, then there will
6601     be no equivalence for the destination.  */
6602  if (n_sets == 1 && REG_NOTES (insn) != 0
6603      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
6604      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
6605	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
6606    src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
6607
6608  /* Canonicalize sources and addresses of destinations.
6609     We do this in a separate pass to avoid problems when a MATCH_DUP is
6610     present in the insn pattern.  In that case, we want to ensure that
6611     we don't break the duplicate nature of the pattern.  So we will replace
6612     both operands at the same time.  Otherwise, we would fail to find an
6613     equivalent substitution in the loop calling validate_change below.
6614
6615     We used to suppress canonicalization of DEST if it appears in SRC,
6616     but we don't do this any more.  */
6617
6618  for (i = 0; i < n_sets; i++)
6619    {
6620      rtx dest = SET_DEST (sets[i].rtl);
6621      rtx src = SET_SRC (sets[i].rtl);
6622      rtx new = canon_reg (src, insn);
6623      int insn_code;
6624
6625      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
6626	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
6627	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
6628	  || (insn_code = recog_memoized (insn)) < 0
6629	  || insn_n_dups[insn_code] > 0)
6630	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
6631      else
6632	SET_SRC (sets[i].rtl) = new;
6633
6634      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
6635	{
6636	  validate_change (insn, &XEXP (dest, 1),
6637			   canon_reg (XEXP (dest, 1), insn), 1);
6638	  validate_change (insn, &XEXP (dest, 2),
6639			   canon_reg (XEXP (dest, 2), insn), 1);
6640	}
6641
6642      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
6643	     || GET_CODE (dest) == ZERO_EXTRACT
6644	     || GET_CODE (dest) == SIGN_EXTRACT)
6645	dest = XEXP (dest, 0);
6646
6647      if (GET_CODE (dest) == MEM)
6648	canon_reg (dest, insn);
6649    }
6650
6651  /* Now that we have done all the replacements, we can apply the change
6652     group and see if they all work.  Note that this will cause some
6653     canonicalizations that would have worked individually not to be applied
6654     because some other canonicalization didn't work, but this should not
6655     occur often.
6656
6657     The result of apply_change_group can be ignored; see canon_reg.  */
6658
6659  apply_change_group ();
6660
6661  /* Set sets[i].src_elt to the class each source belongs to.
6662     Detect assignments from or to volatile things
6663     and set sets[i] to zero so they will be ignored
6664     in the rest of this function.
6665
6666     Nothing in this loop changes the hash table or the register chains.  */
6667
6668  for (i = 0; i < n_sets; i++)
6669    {
6670      register rtx src, dest;
6671      register rtx src_folded;
6672      register struct table_elt *elt = 0, *p;
6673      enum machine_mode mode;
6674      rtx src_eqv_here;
6675      rtx src_const = 0;
6676      rtx src_related = 0;
6677      struct table_elt *src_const_elt = 0;
6678      int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
6679      int src_related_cost = 10000, src_elt_cost = 10000;
6680      /* Set non-zero if we need to call force_const_mem on the
6681	 contents of src_folded before using it.  */
6682      int src_folded_force_flag = 0;
6683
6684      dest = SET_DEST (sets[i].rtl);
6685      src = SET_SRC (sets[i].rtl);
6686
6687      /* If SRC is a constant that has no machine mode,
6688	 hash it with the destination's machine mode.
6689	 This way we can keep different modes separate.  */
6690
6691      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6692      sets[i].mode = mode;
6693
6694      if (src_eqv)
6695	{
6696	  enum machine_mode eqvmode = mode;
6697	  if (GET_CODE (dest) == STRICT_LOW_PART)
6698	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
6699	  do_not_record = 0;
6700	  hash_arg_in_memory = 0;
6701	  hash_arg_in_struct = 0;
6702	  src_eqv = fold_rtx (src_eqv, insn);
6703	  src_eqv_hash = HASH (src_eqv, eqvmode);
6704
6705	  /* Find the equivalence class for the equivalent expression.  */
6706
6707	  if (!do_not_record)
6708	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
6709
6710	  src_eqv_volatile = do_not_record;
6711	  src_eqv_in_memory = hash_arg_in_memory;
6712	  src_eqv_in_struct = hash_arg_in_struct;
6713	}
6714
6715      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
6716	 value of the INNER register, not the destination.  So it is not
6717	 a valid substitution for the source.  But save it for later.  */
6718      if (GET_CODE (dest) == STRICT_LOW_PART)
6719	src_eqv_here = 0;
6720      else
6721	src_eqv_here = src_eqv;
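      /* For example, given (set (strict_low_part (subreg:QI (reg:SI 100) 0))
	 (reg:QI 101)), a REG_EQUAL note describes the full SImode value
	 of register 100 after the store, not the QImode quantity being
	 stored, so it must not replace SRC.  (Illustrative registers.)  */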
6722
      /* Simplify any foldable subexpressions in SRC.  Then get the fully-
	 simplified result, which may not necessarily be valid.  */
6725      src_folded = fold_rtx (src, insn);
6726
6727#if 0
6728      /* ??? This caused bad code to be generated for the m68k port with -O2.
6729	 Suppose src is (CONST_INT -1), and that after truncation src_folded
6730	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
6731	 At the end we will add src and src_const to the same equivalence
6732	 class.  We now have 3 and -1 on the same equivalence class.  This
6733	 causes later instructions to be mis-optimized.  */
6734      /* If storing a constant in a bitfield, pre-truncate the constant
6735	 so we will be able to record it later.  */
6736      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
6737	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
6738	{
6739	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
6740
6741	  if (GET_CODE (src) == CONST_INT
6742	      && GET_CODE (width) == CONST_INT
6743	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
6744	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
6745	    src_folded
6746	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
6747					  << INTVAL (width)) - 1));
6748	}
6749#endif
6750
6751      /* Compute SRC's hash code, and also notice if it
6752	 should not be recorded at all.  In that case,
6753	 prevent any further processing of this assignment.  */
6754      do_not_record = 0;
6755      hash_arg_in_memory = 0;
6756      hash_arg_in_struct = 0;
6757
6758      sets[i].src = src;
6759      sets[i].src_hash = HASH (src, mode);
6760      sets[i].src_volatile = do_not_record;
6761      sets[i].src_in_memory = hash_arg_in_memory;
6762      sets[i].src_in_struct = hash_arg_in_struct;
6763
6764      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
6765	 a pseudo that is set more than once, do not record SRC.  Using
6766	 SRC as a replacement for anything else will be incorrect in that
6767	 situation.  Note that this usually occurs only for stack slots,
6768	 in which case all the RTL would be referring to SRC, so we don't
6769	 lose any optimization opportunities by not having SRC in the
6770	 hash table.  */
6771
6772      if (GET_CODE (src) == MEM
6773	  && find_reg_note (insn, REG_EQUIV, src) != 0
6774	  && GET_CODE (dest) == REG
6775	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER
6776	  && REG_N_SETS (REGNO (dest)) != 1)
6777	sets[i].src_volatile = 1;
6778
6779#if 0
6780      /* It is no longer clear why we used to do this, but it doesn't
6781	 appear to still be needed.  So let's try without it since this
6782	 code hurts cse'ing widened ops.  */
6783      /* If source is a perverse subreg (such as QI treated as an SI),
6784	 treat it as volatile.  It may do the work of an SI in one context
6785	 where the extra bits are not being used, but cannot replace an SI
6786	 in general.  */
6787      if (GET_CODE (src) == SUBREG
6788	  && (GET_MODE_SIZE (GET_MODE (src))
6789	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
6790	sets[i].src_volatile = 1;
6791#endif
6792
6793      /* Locate all possible equivalent forms for SRC.  Try to replace
6794         SRC in the insn with each cheaper equivalent.
6795
6796         We have the following types of equivalents: SRC itself, a folded
6797         version, a value given in a REG_EQUAL note, or a value related
6798	 to a constant.
6799
6800         Each of these equivalents may be part of an additional class
6801         of equivalents (if more than one is in the table, they must be in
6802         the same class; we check for this).
6803
6804	 If the source is volatile, we don't do any table lookups.
6805
6806         We note any constant equivalent for possible later use in a
6807         REG_NOTE.  */
6808
6809      if (!sets[i].src_volatile)
6810	elt = lookup (src, sets[i].src_hash, mode);
6811
6812      sets[i].src_elt = elt;
6813
6814      if (elt && src_eqv_here && src_eqv_elt)
6815        {
6816          if (elt->first_same_value != src_eqv_elt->first_same_value)
6817	    {
6818	      /* The REG_EQUAL is indicating that two formerly distinct
6819		 classes are now equivalent.  So merge them.  */
6820	      merge_equiv_classes (elt, src_eqv_elt);
6821	      src_eqv_hash = HASH (src_eqv, elt->mode);
6822	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
6823	    }
6824
6825          src_eqv_here = 0;
6826        }
6827
6828      else if (src_eqv_elt)
6829        elt = src_eqv_elt;
6830
6831      /* Try to find a constant somewhere and record it in `src_const'.
6832	 Record its table element, if any, in `src_const_elt'.  Look in
6833	 any known equivalences first.  (If the constant is not in the
6834	 table, also set `sets[i].src_const_hash').  */
6835      if (elt)
6836        for (p = elt->first_same_value; p; p = p->next_same_value)
6837	  if (p->is_const)
6838	    {
6839	      src_const = p->exp;
6840	      src_const_elt = elt;
6841	      break;
6842	    }
6843
6844      if (src_const == 0
6845	  && (CONSTANT_P (src_folded)
6846	      /* Consider (minus (label_ref L1) (label_ref L2)) as
6847		 "constant" here so we will record it. This allows us
6848		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
6849	      || (GET_CODE (src_folded) == MINUS
6850		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
6851		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
6852	src_const = src_folded, src_const_elt = elt;
6853      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
6854	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
6855
6856      /* If we don't know if the constant is in the table, get its
6857	 hash code and look it up.  */
6858      if (src_const && src_const_elt == 0)
6859	{
6860	  sets[i].src_const_hash = HASH (src_const, mode);
6861	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
6862	}
6863
6864      sets[i].src_const = src_const;
6865      sets[i].src_const_elt = src_const_elt;
6866
6867      /* If the constant and our source are both in the table, mark them as
6868	 equivalent.  Otherwise, if a constant is in the table but the source
6869	 isn't, set ELT to it.  */
6870      if (src_const_elt && elt
6871	  && src_const_elt->first_same_value != elt->first_same_value)
6872	merge_equiv_classes (elt, src_const_elt);
6873      else if (src_const_elt && elt == 0)
6874	elt = src_const_elt;
6875
6876      /* See if there is a register linearly related to a constant
6877         equivalent of SRC.  */
6878      if (src_const
6879	  && (GET_CODE (src_const) == CONST
6880	      || (src_const_elt && src_const_elt->related_value != 0)))
6881        {
6882          src_related = use_related_value (src_const, src_const_elt);
6883          if (src_related)
6884            {
6885	      struct table_elt *src_related_elt
6886		    = lookup (src_related, HASH (src_related, mode), mode);
6887	      if (src_related_elt && elt)
6888	        {
6889		  if (elt->first_same_value
6890		      != src_related_elt->first_same_value)
6891		    /* This can occur when we previously saw a CONST
6892		       involving a SYMBOL_REF and then see the SYMBOL_REF
6893		       twice.  Merge the involved classes.  */
6894		    merge_equiv_classes (elt, src_related_elt);
6895
6896	          src_related = 0;
6897		  src_related_elt = 0;
6898	        }
6899              else if (src_related_elt && elt == 0)
6900	        elt = src_related_elt;
6901	    }
6902        }
6903
6904      /* See if we have a CONST_INT that is already in a register in a
6905	 wider mode.  */
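      /* For example, if (reg:SI 100) is already known to hold
	 (const_int 7) and we now need that constant in HImode, then
	 (subreg:HI (reg:SI 100) 0) may be cheaper than reloading the
	 constant.  (Register number is illustrative.)  */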
6906
6907      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
6908	  && GET_MODE_CLASS (mode) == MODE_INT
6909	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
6910	{
6911	  enum machine_mode wider_mode;
6912
6913	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
6914	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
6915	       && src_related == 0;
6916	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
6917	    {
6918	      struct table_elt *const_elt
6919		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
6920
6921	      if (const_elt == 0)
6922		continue;
6923
6924	      for (const_elt = const_elt->first_same_value;
6925		   const_elt; const_elt = const_elt->next_same_value)
6926		if (GET_CODE (const_elt->exp) == REG)
6927		  {
6928		    src_related = gen_lowpart_if_possible (mode,
6929							   const_elt->exp);
6930		    break;
6931		  }
6932	    }
6933	}
6934
6935      /* Another possibility is that we have an AND with a constant in
6936	 a mode narrower than a word.  If so, it might have been generated
6937	 as part of an "if" which would narrow the AND.  If we already
6938	 have done the AND in a wider mode, we can use a SUBREG of that
6939	 value.  */
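      /* For example, if this insn computes (and:QI (reg:QI 100)
	 (const_int 15)) and the table already holds an SImode AND of
	 the same value with (const_int 15), the low part of that wider
	 result can be reused here.  (Hypothetical registers.)  */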
6940
6941      if (flag_expensive_optimizations && ! src_related
6942	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
6943	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6944	{
6945	  enum machine_mode tmode;
6946	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
6947
6948	  for (tmode = GET_MODE_WIDER_MODE (mode);
6949	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6950	       tmode = GET_MODE_WIDER_MODE (tmode))
6951	    {
6952	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
6953	      struct table_elt *larger_elt;
6954
6955	      if (inner)
6956		{
6957		  PUT_MODE (new_and, tmode);
6958		  XEXP (new_and, 0) = inner;
6959		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
6960		  if (larger_elt == 0)
6961		    continue;
6962
6963		  for (larger_elt = larger_elt->first_same_value;
6964		       larger_elt; larger_elt = larger_elt->next_same_value)
6965		    if (GET_CODE (larger_elt->exp) == REG)
6966		      {
6967			src_related
6968			  = gen_lowpart_if_possible (mode, larger_elt->exp);
6969			break;
6970		      }
6971
6972		  if (src_related)
6973		    break;
6974		}
6975	    }
6976	}
6977
6978#ifdef LOAD_EXTEND_OP
      /* See if a MEM has already been loaded with a widening operation;
	 if it has, we can use a subreg of that.  Many CISC machines
	 also have such operations, but this is only likely to be
	 beneficial on these machines.  */
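      /* For example, if (zero_extend:SI (mem:QI addr)) was loaded into
	 a register earlier and LOAD_EXTEND_OP (QImode) is ZERO_EXTEND,
	 a low-part subreg of that register can stand in for
	 (mem:QI addr).  (The address operand is schematic.)  */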
6983
      if (flag_expensive_optimizations && src_related == 0
6985	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
6986	  && GET_MODE_CLASS (mode) == MODE_INT
6987	  && GET_CODE (src) == MEM && ! do_not_record
6988	  && LOAD_EXTEND_OP (mode) != NIL)
6989	{
6990	  enum machine_mode tmode;
6991
6992	  /* Set what we are trying to extend and the operation it might
6993	     have been extended with.  */
6994	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
6995	  XEXP (memory_extend_rtx, 0) = src;
6996
6997	  for (tmode = GET_MODE_WIDER_MODE (mode);
6998	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
6999	       tmode = GET_MODE_WIDER_MODE (tmode))
7000	    {
7001	      struct table_elt *larger_elt;
7002
7003	      PUT_MODE (memory_extend_rtx, tmode);
7004	      larger_elt = lookup (memory_extend_rtx,
7005				   HASH (memory_extend_rtx, tmode), tmode);
7006	      if (larger_elt == 0)
7007		continue;
7008
7009	      for (larger_elt = larger_elt->first_same_value;
7010		   larger_elt; larger_elt = larger_elt->next_same_value)
7011		if (GET_CODE (larger_elt->exp) == REG)
7012		  {
7013		    src_related = gen_lowpart_if_possible (mode,
7014							   larger_elt->exp);
7015		    break;
7016		  }
7017
7018	      if (src_related)
7019		break;
7020	    }
7021	}
7022#endif /* LOAD_EXTEND_OP */
7023
7024      if (src == src_folded)
7025        src_folded = 0;
7026
7027      /* At this point, ELT, if non-zero, points to a class of expressions
7028         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
7029	 and SRC_RELATED, if non-zero, each contain additional equivalent
7030	 expressions.  Prune these latter expressions by deleting expressions
7031	 already in the equivalence class.
7032
7033	 Check for an equivalent identical to the destination.  If found,
7034	 this is the preferred equivalent since it will likely lead to
7035	 elimination of the insn.  Indicate this by placing it in
7036	 `src_related'.  */
7037
7038      if (elt) elt = elt->first_same_value;
7039      for (p = elt; p; p = p->next_same_value)
7040        {
7041	  enum rtx_code code = GET_CODE (p->exp);
7042
7043	  /* If the expression is not valid, ignore it.  Then we do not
7044	     have to check for validity below.  In most cases, we can use
7045	     `rtx_equal_p', since canonicalization has already been done.  */
7046	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
7047	    continue;
7048
7049	  /* Also skip paradoxical subregs, unless that's what we're
7050	     looking for.  */
7051	  if (code == SUBREG
7052	      && (GET_MODE_SIZE (GET_MODE (p->exp))
7053		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
7054	      && ! (src != 0
7055		    && GET_CODE (src) == SUBREG
7056		    && GET_MODE (src) == GET_MODE (p->exp)
7057		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7058			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
7059	    continue;
7060
7061          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
7062	    src = 0;
7063          else if (src_folded && GET_CODE (src_folded) == code
7064		   && rtx_equal_p (src_folded, p->exp))
7065	    src_folded = 0;
7066          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
7067		   && rtx_equal_p (src_eqv_here, p->exp))
7068	    src_eqv_here = 0;
7069          else if (src_related && GET_CODE (src_related) == code
7070		   && rtx_equal_p (src_related, p->exp))
7071	    src_related = 0;
7072
	  /* If this is the same as the destination of the insn, we want
	     to prefer it.  Copy it to src_related; the code below will
	     then give it a negative cost.  */
7076	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
7077	    src_related = dest;
7078
7079        }
7080
7081      /* Find the cheapest valid equivalent, trying all the available
7082         possibilities.  Prefer items not in the hash table to ones
7083         that are when they are equal cost.  Note that we can never
7084         worsen an insn as the current contents will also succeed.
7085	 If we find an equivalent identical to the destination, use it as best,
7086	 since this insn will probably be eliminated in that case.  */
7087      if (src)
7088	{
7089	  if (rtx_equal_p (src, dest))
7090	    src_cost = -1;
7091	  else
7092	    src_cost = COST (src);
7093	}
7094
7095      if (src_eqv_here)
7096	{
7097	  if (rtx_equal_p (src_eqv_here, dest))
7098	    src_eqv_cost = -1;
7099	  else
7100	    src_eqv_cost = COST (src_eqv_here);
7101	}
7102
7103      if (src_folded)
7104	{
7105	  if (rtx_equal_p (src_folded, dest))
7106	    src_folded_cost = -1;
7107	  else
7108	    src_folded_cost = COST (src_folded);
7109	}
7110
7111      if (src_related)
7112	{
7113	  if (rtx_equal_p (src_related, dest))
7114	    src_related_cost = -1;
7115	  else
7116	    src_related_cost = COST (src_related);
7117	}
7118
7119      /* If this was an indirect jump insn, a known label will really be
7120	 cheaper even though it looks more expensive.  */
7121      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
7122	src_folded = src_const, src_folded_cost = -1;
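      /* That is, for (set (pc) (reg:SI 100)) where register 100 is
	 known to hold (label_ref L), substituting the label turns the
	 indirect jump into a direct one.  (Hypothetical register and
	 label.)  */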
7123
7124      /* Terminate loop when replacement made.  This must terminate since
7125         the current contents will be tested and will always be valid.  */
7126      while (1)
7127        {
7128          rtx trial, old_src;
7129
7130          /* Skip invalid entries.  */
7131          while (elt && GET_CODE (elt->exp) != REG
7132	         && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7133	    elt = elt->next_same_value;
7134
7135	  /* A paradoxical subreg would be bad here: it'll be the right
7136	     size, but later may be adjusted so that the upper bits aren't
7137	     what we want.  So reject it.  */
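	  /* E.g. (subreg:SI (reg:QI 100) 0) is such a paradoxical
	     subreg; bits beyond QImode are undefined and may change,
	     so it cannot stand in for a full SImode value.
	     (Illustrative register.)  */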
7138	  if (elt != 0
7139	      && GET_CODE (elt->exp) == SUBREG
7140	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
7141		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
7142	      /* It is okay, though, if the rtx we're trying to match
7143		 will ignore any of the bits we can't predict.  */
7144	      && ! (src != 0
7145		    && GET_CODE (src) == SUBREG
7146		    && GET_MODE (src) == GET_MODE (elt->exp)
7147		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
7148			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
7149	    {
7150	      elt = elt->next_same_value;
7151	      continue;
7152	    }
7153
7154          if (elt) src_elt_cost = elt->cost;
7155
          /* Find cheapest and skip it for the next time.  For items
7157	     of equal cost, use this order:
7158	     src_folded, src, src_eqv, src_related and hash table entry.  */
7159          if (src_folded_cost <= src_cost
7160	      && src_folded_cost <= src_eqv_cost
7161	      && src_folded_cost <= src_related_cost
7162	      && src_folded_cost <= src_elt_cost)
7163	    {
7164	      trial = src_folded, src_folded_cost = 10000;
7165	      if (src_folded_force_flag)
7166		trial = force_const_mem (mode, trial);
7167	    }
7168          else if (src_cost <= src_eqv_cost
7169	           && src_cost <= src_related_cost
7170	           && src_cost <= src_elt_cost)
7171	    trial = src, src_cost = 10000;
7172          else if (src_eqv_cost <= src_related_cost
7173	           && src_eqv_cost <= src_elt_cost)
7174	    trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
7175          else if (src_related_cost <= src_elt_cost)
7176	    trial = copy_rtx (src_related), src_related_cost = 10000;
7177          else
7178	    {
7179	      trial = copy_rtx (elt->exp);
7180	      elt = elt->next_same_value;
7181	      src_elt_cost = 10000;
7182	    }
7183
7184	  /* We don't normally have an insn matching (set (pc) (pc)), so
7185	     check for this separately here.  We will delete such an
7186	     insn below.
7187
7188	     Tablejump insns contain a USE of the table, so simply replacing
7189	     the operand with the constant won't match.  This is simply an
7190	     unconditional branch, however, and is therefore valid.  Just
7191	     insert the substitution here and we will delete and re-emit
7192	     the insn later.  */
7193
7194	  /* Keep track of the original SET_SRC so that we can fix notes
7195	     on libcall instructions.  */
	  old_src = SET_SRC (sets[i].rtl);
7197
7198	  if (n_sets == 1 && dest == pc_rtx
7199	      && (trial == pc_rtx
7200		  || (GET_CODE (trial) == LABEL_REF
7201		      && ! condjump_p (insn))))
7202	    {
7203	      /* If TRIAL is a label in front of a jump table, we are
7204		 really falling through the switch (this is how casesi
7205		 insns work), so we must branch around the table.  */
7206	      if (GET_CODE (trial) == CODE_LABEL
7207		  && NEXT_INSN (trial) != 0
7208		  && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
7209		  && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
7210		      || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
7211
7212		trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
7213
7214	      SET_SRC (sets[i].rtl) = trial;
	      cse_jumps_altered = 1;
7216	      break;
7217	    }
7218
7219	  /* Look for a substitution that makes a valid insn.  */
7220          else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
7221	    {
7222	      /* If we just made a substitution inside a libcall, then we
7223		 need to make the same substitution in any notes attached
7224		 to the RETVAL insn.  */
7225	      if (libcall_insn
7226		  && (GET_CODE (old_src) == REG
7227		      || GET_CODE (old_src) == SUBREG
		      || GET_CODE (old_src) == MEM))
7229		replace_rtx (REG_NOTES (libcall_insn), old_src,
7230			     canon_reg (SET_SRC (sets[i].rtl), insn));
7231
7232	      /* The result of apply_change_group can be ignored; see
7233		 canon_reg.  */
7234
7235	      validate_change (insn, &SET_SRC (sets[i].rtl),
7236			       canon_reg (SET_SRC (sets[i].rtl), insn),
7237			       1);
7238	      apply_change_group ();
7239	      break;
7240	    }
7241
7242	  /* If we previously found constant pool entries for
7243	     constants and this is a constant, try making a
7244	     pool entry.  Put it in src_folded unless we already have done
7245	     this since that is where it likely came from.  */
7246
7247	  else if (constant_pool_entries_cost
7248		   && CONSTANT_P (trial)
7249		   && ! (GET_CODE (trial) == CONST
7250			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
7251		   && (src_folded == 0
7252		       || (GET_CODE (src_folded) != MEM
7253			   && ! src_folded_force_flag))
7254		   && GET_MODE_CLASS (mode) != MODE_CC
7255		   && mode != VOIDmode)
7256	    {
7257	      src_folded_force_flag = 1;
7258	      src_folded = trial;
7259	      src_folded_cost = constant_pool_entries_cost;
7260	    }
7261        }
7262
7263      src = SET_SRC (sets[i].rtl);
7264
7265      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
7266	 However, there is an important exception:  If both are registers
7267	 that are not the head of their equivalence class, replace SET_SRC
7268	 with the head of the class.  If we do not do this, we will have
7269	 both registers live over a portion of the basic block.  This way,
7270	 their lifetimes will likely abut instead of overlapping.  */
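      /* For instance, if registers 100 and 101 are equivalent and
	 register 100 heads the class, we rewrite
	 (set (reg:SI 101) (reg:SI 101)) as (set (reg:SI 101) (reg:SI 100)),
	 so register 101 need not stay live alongside register 100.
	 (Register numbers are illustrative.)  */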
7271      if (GET_CODE (dest) == REG
7272	  && REGNO_QTY_VALID_P (REGNO (dest))
7273	  && qty_mode[REG_QTY (REGNO (dest))] == GET_MODE (dest)
7274	  && qty_first_reg[REG_QTY (REGNO (dest))] != REGNO (dest)
7275	  && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
7276	  /* Don't do this if the original insn had a hard reg as
7277	     SET_SRC.  */
7278	  && (GET_CODE (sets[i].src) != REG
7279	      || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER))
7280	/* We can't call canon_reg here because it won't do anything if
7281	   SRC is a hard register.  */
7282	{
7283	  int first = qty_first_reg[REG_QTY (REGNO (src))];
7284	  rtx new_src
7285	    = (first >= FIRST_PSEUDO_REGISTER
7286	       ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
7287
7288	  /* We must use validate-change even for this, because this
7289	     might be a special no-op instruction, suitable only to
7290	     tag notes onto.  */
7291	  if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
7292	    {
7293	      src = new_src;
7294	      /* If we had a constant that is cheaper than what we are now
7295		 setting SRC to, use that constant.  We ignored it when we
7296		 thought we could make this into a no-op.  */
7297	      if (src_const && COST (src_const) < COST (src)
7298		  && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
7299				      0))
7300		src = src_const;
7301	    }
7302	}
7303
7304      /* If we made a change, recompute SRC values.  */
7305      if (src != sets[i].src)
7306        {
7307          do_not_record = 0;
7308          hash_arg_in_memory = 0;
7309          hash_arg_in_struct = 0;
7310	  sets[i].src = src;
7311          sets[i].src_hash = HASH (src, mode);
7312          sets[i].src_volatile = do_not_record;
7313          sets[i].src_in_memory = hash_arg_in_memory;
7314          sets[i].src_in_struct = hash_arg_in_struct;
7315          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
7316        }
7317
7318      /* If this is a single SET, we are setting a register, and we have an
	 equivalent constant, we want to add a REG_NOTE.  We don't want
7320	 to write a REG_EQUAL note for a constant pseudo since verifying that
7321	 that pseudo hasn't been eliminated is a pain.  Such a note also
7322	 won't help anything.
7323
7324	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
7325	 which can be created for a reference to a compile time computable
7326	 entry in a jump table.  */
7327
7328      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
7329	  && GET_CODE (src_const) != REG
7330	  && ! (GET_CODE (src_const) == CONST
7331		&& GET_CODE (XEXP (src_const, 0)) == MINUS
7332		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
7333		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
7334	{
7335	  tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7336
7337	  /* Make sure that the rtx is not shared with any other insn.  */
7338	  src_const = copy_rtx (src_const);
7339
7340	  /* Record the actual constant value in a REG_EQUAL note, making
7341	     a new one if one does not already exist.  */
7342	  if (tem)
7343	    XEXP (tem, 0) = src_const;
7344	  else
7345	    REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
7346						  src_const, REG_NOTES (insn));
7347
7348          /* If storing a constant value in a register that
7349	     previously held the constant value 0,
7350	     record this fact with a REG_WAS_0 note on this insn.
7351
7352	     Note that the *register* is required to have previously held 0,
7353	     not just any register in the quantity and we must point to the
7354	     insn that set that register to zero.
7355
7356	     Rather than track each register individually, we just see if
7357	     the last set for this quantity was for this register.  */
7358
7359	  if (REGNO_QTY_VALID_P (REGNO (dest))
7360	      && qty_const[REG_QTY (REGNO (dest))] == const0_rtx)
7361	    {
7362	      /* See if we previously had a REG_WAS_0 note.  */
7363	      rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7364	      rtx const_insn = qty_const_insn[REG_QTY (REGNO (dest))];
7365
7366	      if ((tem = single_set (const_insn)) != 0
7367		  && rtx_equal_p (SET_DEST (tem), dest))
7368		{
7369		  if (note)
7370		    XEXP (note, 0) = const_insn;
7371		  else
7372		    REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
7373							  const_insn,
7374							  REG_NOTES (insn));
7375		}
7376	    }
7377	}
7378
7379      /* Now deal with the destination.  */
7380      do_not_record = 0;
      sets[i].inner_dest_loc = &SET_DEST (sets[i].rtl);
7382
7383      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
7384	 to the MEM or REG within it.  */
7385      while (GET_CODE (dest) == SIGN_EXTRACT
7386	     || GET_CODE (dest) == ZERO_EXTRACT
7387	     || GET_CODE (dest) == SUBREG
7388	     || GET_CODE (dest) == STRICT_LOW_PART)
7389	{
7390	  sets[i].inner_dest_loc = &XEXP (dest, 0);
7391	  dest = XEXP (dest, 0);
7392	}
7393
7394      sets[i].inner_dest = dest;
7395
7396      if (GET_CODE (dest) == MEM)
7397	{
7398#ifdef PUSH_ROUNDING
7399	  /* Stack pushes invalidate the stack pointer.  */
7400	  rtx addr = XEXP (dest, 0);
7401	  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
7402	       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
7403	      && XEXP (addr, 0) == stack_pointer_rtx)
7404	    invalidate (stack_pointer_rtx, Pmode);
7405#endif
7406	  dest = fold_rtx (dest, insn);
7407	}
7408
7409      /* Compute the hash code of the destination now,
7410	 before the effects of this instruction are recorded,
7411	 since the register values used in the address computation
7412	 are those before this instruction.  */
7413      sets[i].dest_hash = HASH (dest, mode);
7414
7415      /* Don't enter a bit-field in the hash table
7416	 because the value in it after the store
7417	 may not equal what was stored, due to truncation.  */
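      /* For example, storing (const_int 29) into a 4-bit field leaves
	 the value 13 there, so recording 29 for the destination would
	 be wrong; a constant such as (const_int 5) survives the store
	 unchanged and is safe to record, per the check below.  */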
7418
7419      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
7420	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
7421	{
7422	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
7423
7424	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
7425	      && GET_CODE (width) == CONST_INT
7426	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
7427	      && ! (INTVAL (src_const)
7428		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
7429	    /* Exception: if the value is constant,
7430	       and it won't be truncated, record it.  */
7431	    ;
7432	  else
7433	    {
7434	      /* This is chosen so that the destination will be invalidated
7435		 but no new value will be recorded.
7436		 We must invalidate because sometimes constant
7437		 values can be recorded for bitfields.  */
7438	      sets[i].src_elt = 0;
7439	      sets[i].src_volatile = 1;
7440	      src_eqv = 0;
7441	      src_eqv_elt = 0;
7442	    }
7443	}
7444
7445      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
7446	 the insn.  */
7447      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
7448	{
7449	  PUT_CODE (insn, NOTE);
7450	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7451	  NOTE_SOURCE_FILE (insn) = 0;
7452	  cse_jumps_altered = 1;
7453	  /* One less use of the label this insn used to jump to.  */
7454	  if (JUMP_LABEL (insn) != 0)
7455	    --LABEL_NUSES (JUMP_LABEL (insn));
7456	  /* No more processing for this set.  */
7457	  sets[i].rtl = 0;
7458	}
7459
7460      /* If this SET is now setting PC to a label, we know it used to
7461	 be a conditional or computed branch.  So we see if we can follow
7462	 it.  If it was a computed branch, delete it and re-emit.  */
7463      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
7464	{
7465	  rtx p;
7466
7467	  /* If this is not in the format for a simple branch and
7468	     we are the only SET in it, re-emit it.  */
7469	  if (! simplejump_p (insn) && n_sets == 1)
7470	    {
7471	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
7472	      JUMP_LABEL (new) = XEXP (src, 0);
7473	      LABEL_NUSES (XEXP (src, 0))++;
7474	      insn = new;
7475	    }
7476	  else
7477	    /* Otherwise, force rerecognition, since it probably had
7478	       a different pattern before.
7479	       This shouldn't really be necessary, since whatever
7480	       changed the source value above should have done this.
7481	       Until the right place is found, might as well do this here.  */
7482	    INSN_CODE (insn) = -1;
7483
7484	  /* Now emit a BARRIER after the unconditional jump.  Do not bother
7485	     deleting any unreachable code, let jump/flow do that.  */
7486	  if (NEXT_INSN (insn) != 0
7487	      && GET_CODE (NEXT_INSN (insn)) != BARRIER)
7488	    emit_barrier_after (insn);
7489
7490	  cse_jumps_altered = 1;
7491	  sets[i].rtl = 0;
7492	}
7493
7494      /* If destination is volatile, invalidate it and then do no further
7495	 processing for this assignment.  */
7496
7497      else if (do_not_record)
7498	{
7499	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7500	      || GET_CODE (dest) == MEM)
7501	    invalidate (dest, VOIDmode);
7502	  else if (GET_CODE (dest) == STRICT_LOW_PART
7503		   || GET_CODE (dest) == ZERO_EXTRACT)
7504	    invalidate (XEXP (dest, 0), GET_MODE (dest));
7505	  sets[i].rtl = 0;
7506	}
7507
7508      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
7509	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
7510
7511#ifdef HAVE_cc0
7512      /* If setting CC0, record what it was set to, or a constant, if it
7513	 is equivalent to a constant.  If it is being set to a floating-point
7514	 value, make a COMPARE with the appropriate constant of 0.  If we
7515	 don't do this, later code can interpret this as a test against
7516	 const0_rtx, which can cause problems if we try to put it into an
7517	 insn as a floating-point operand.  */
7518      if (dest == cc0_rtx)
7519	{
7520	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
7521	  this_insn_cc0_mode = mode;
7522	  if (FLOAT_MODE_P (mode))
7523	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
7524					     CONST0_RTX (mode));
7525	}
7526#endif
7527    }
7528
7529  /* Now enter all non-volatile source expressions in the hash table
7530     if they are not already present.
7531     Record their equivalence classes in src_elt.
7532     This way we can insert the corresponding destinations into
7533     the same classes even if the actual sources are no longer in them
7534     (having been invalidated).  */
7535
7536  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
7537      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
7538    {
7539      register struct table_elt *elt;
7540      register struct table_elt *classp = sets[0].src_elt;
7541      rtx dest = SET_DEST (sets[0].rtl);
7542      enum machine_mode eqvmode = GET_MODE (dest);
7543
7544      if (GET_CODE (dest) == STRICT_LOW_PART)
7545	{
7546	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
7547	  classp = 0;
7548	}
7549      if (insert_regs (src_eqv, classp, 0))
7550	{
7551	  rehash_using_reg (src_eqv);
7552	  src_eqv_hash = HASH (src_eqv, eqvmode);
7553	}
7554      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
7555      elt->in_memory = src_eqv_in_memory;
7556      elt->in_struct = src_eqv_in_struct;
7557      src_eqv_elt = elt;
7558
7559      /* Check to see if src_eqv_elt is the same as a set source which
7560	 does not yet have an elt, and if so set the elt of the set source
7561	 to src_eqv_elt.  */
7562      for (i = 0; i < n_sets; i++)
7563	if (sets[i].rtl && sets[i].src_elt == 0
7564	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
7565	  sets[i].src_elt = src_eqv_elt;
7566    }
7567
7568  for (i = 0; i < n_sets; i++)
7569    if (sets[i].rtl && ! sets[i].src_volatile
7570	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
7571      {
7572	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
7573	  {
7574	    /* REG_EQUAL in setting a STRICT_LOW_PART
7575	       gives an equivalent for the entire destination register,
7576	       not just for the subreg being stored in now.
7577	       This is a more interesting equivalence, so we arrange later
7578	       to treat the entire reg as the destination.  */
7579	    sets[i].src_elt = src_eqv_elt;
7580	    sets[i].src_hash = src_eqv_hash;
7581	  }
7582	else
7583	  {
7584	    /* Insert source and constant equivalent into hash table, if not
7585	       already present.  */
7586	    register struct table_elt *classp = src_eqv_elt;
7587	    register rtx src = sets[i].src;
7588	    register rtx dest = SET_DEST (sets[i].rtl);
7589	    enum machine_mode mode
7590	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
7591
7592	    /* Don't put a hard register source into the table if this is
7593	       the last insn of a libcall.  */
7594	    if (sets[i].src_elt == 0
7595		&& (GET_CODE (src) != REG
7596		    || REGNO (src) >= FIRST_PSEUDO_REGISTER
7597		    || ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
7598	      {
7599		register struct table_elt *elt;
7600
7601		/* Note that these insert_regs calls cannot remove
7602		   any of the src_elt's, because they would have failed to
7603		   match if not still valid.  */
7604		if (insert_regs (src, classp, 0))
7605		  {
7606		    rehash_using_reg (src);
7607		    sets[i].src_hash = HASH (src, mode);
7608		  }
7609		elt = insert (src, classp, sets[i].src_hash, mode);
7610		elt->in_memory = sets[i].src_in_memory;
7611		elt->in_struct = sets[i].src_in_struct;
7612		sets[i].src_elt = classp = elt;
7613	      }
7614
7615	    if (sets[i].src_const && sets[i].src_const_elt == 0
7616		&& src != sets[i].src_const
7617		&& ! rtx_equal_p (sets[i].src_const, src))
7618	      sets[i].src_elt = insert (sets[i].src_const, classp,
7619					sets[i].src_const_hash, mode);
7620	  }
7621      }
7622    else if (sets[i].src_elt == 0)
7623      /* If we did not insert the source into the hash table (e.g., it was
7624	 volatile), note the equivalence class for the REG_EQUAL value, if any,
7625	 so that the destination goes into that class.  */
7626      sets[i].src_elt = src_eqv_elt;
7627
7628  invalidate_from_clobbers (x);
7629
7630  /* Some registers are invalidated by subroutine calls.  Memory is
7631     invalidated by non-constant calls.  */
7632
7633  if (GET_CODE (insn) == CALL_INSN)
7634    {
7635      if (! CONST_CALL_P (insn))
7636	invalidate_memory ();
7637      invalidate_for_call ();
7638    }
7639
7640  /* Now invalidate everything set by this instruction.
7641     If a SUBREG or other funny destination is being set,
7642     sets[i].rtl is still nonzero, so here we invalidate the reg
7643     a part of which is being set.  */
7644
7645  for (i = 0; i < n_sets; i++)
7646    if (sets[i].rtl)
7647      {
7648	/* We can't use the inner dest, because the mode associated with
7649	   a ZERO_EXTRACT is significant.  */
7650	register rtx dest = SET_DEST (sets[i].rtl);
7651
7652	/* Needed for registers to remove the register from its
7653	   previous quantity's chain.
7654	   Needed for memory if this is a nonvarying address, unless
7655	   we have just done an invalidate_memory that covers even those.  */
7656	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
7657	    || GET_CODE (dest) == MEM)
7658	  invalidate (dest, VOIDmode);
7659	else if (GET_CODE (dest) == STRICT_LOW_PART
7660		 || GET_CODE (dest) == ZERO_EXTRACT)
7661	  invalidate (XEXP (dest, 0), GET_MODE (dest));
7662      }
7663
7664  /* A volatile ASM invalidates everything.  */
7665  if (GET_CODE (insn) == INSN
7666      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
7667      && MEM_VOLATILE_P (PATTERN (insn)))
7668    flush_hash_table ();
7669
7670  /* Make sure registers mentioned in destinations
7671     are safe for use in an expression to be inserted.
7672     This removes from the hash table
7673     any invalid entry that refers to one of these registers.
7674
7675     We don't care about the return value from mention_regs because
7676     we are going to hash the SET_DEST values unconditionally.  */
7677
7678  for (i = 0; i < n_sets; i++)
7679    {
7680      if (sets[i].rtl)
7681	{
7682	  rtx x = SET_DEST (sets[i].rtl);
7683
7684	  if (GET_CODE (x) != REG)
7685	    mention_regs (x);
7686	  else
7687	    {
7688	      /* We used to rely on all references to a register becoming
7689		 inaccessible when a register changes to a new quantity,
7690		 since that changes the hash code.  However, that is not
7691		 safe, since after NBUCKETS new quantities we get a
7692		 hash 'collision' of a register with its own invalid
7693		 entries.  And since SUBREGs have been changed not to
7694		 change their hash code with the hash code of the register,
7695		 it wouldn't work any longer at all.  So we have to check
7696		 for any invalid references lying around now.
7697		 This code is similar to the REG case in mention_regs,
7698		 but it knows that reg_tick has been incremented, and
		 it leaves reg_in_table as -1.  */
7700	      register int regno = REGNO (x);
7701	      register int endregno
7702		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
7703			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
7704	      int i;
7705
7706	      for (i = regno; i < endregno; i++)
7707		{
7708		  if (REG_IN_TABLE (i) >= 0)
7709		    {
7710		      remove_invalid_refs (i);
7711		      REG_IN_TABLE (i) = -1;
7712		    }
7713		}
7714	    }
7715	}
7716    }
7717
7718  /* We may have just removed some of the src_elt's from the hash table.
7719     So replace each one with the current head of the same class.  */
7720
7721  for (i = 0; i < n_sets; i++)
7722    if (sets[i].rtl)
7723      {
7724	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
7725	  /* If elt was removed, find current head of same class,
7726	     or 0 if nothing remains of that class.  */
7727	  {
7728	    register struct table_elt *elt = sets[i].src_elt;
7729
7730	    while (elt && elt->prev_same_value)
7731	      elt = elt->prev_same_value;
7732
7733	    while (elt && elt->first_same_value == 0)
7734	      elt = elt->next_same_value;
7735	    sets[i].src_elt = elt ? elt->first_same_value : 0;
7736	  }
7737      }
7738
7739  /* Now insert the destinations into their equivalence classes.  */
7740
7741  for (i = 0; i < n_sets; i++)
7742    if (sets[i].rtl)
7743      {
7744	register rtx dest = SET_DEST (sets[i].rtl);
7745	rtx inner_dest = sets[i].inner_dest;
7746	register struct table_elt *elt;
7747
7748	/* Don't record value if we are not supposed to risk allocating
7749	   floating-point values in registers that might be wider than
7750	   memory.  */
7751	if ((flag_float_store
7752	     && GET_CODE (dest) == MEM
7753	     && FLOAT_MODE_P (GET_MODE (dest)))
7754	    /* Don't record BLKmode values, because we don't know the
7755	       size of it, and can't be sure that other BLKmode values
7756	       have the same or smaller size.  */
7757	    || GET_MODE (dest) == BLKmode
7758	    /* Don't record values of destinations set inside a libcall block
7759	       since we might delete the libcall.  Things should have been set
7760	       up so we won't want to reuse such a value, but we play it safe
7761	       here.  */
7762	    || libcall_insn
7763	    /* If we didn't put a REG_EQUAL value or a source into the hash
	       table, there is no point in recording DEST.  */
7765	    || sets[i].src_elt == 0
7766	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
7767	       or SIGN_EXTEND, don't record DEST since it can cause
7768	       some tracking to be wrong.
7769
7770	       ??? Think about this more later.  */
7771	    || (GET_CODE (dest) == SUBREG
7772		&& (GET_MODE_SIZE (GET_MODE (dest))
7773		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7774		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
7775		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
7776	  continue;
7777
7778	/* STRICT_LOW_PART isn't part of the value BEING set,
7779	   and neither is the SUBREG inside it.
7780	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
7781	if (GET_CODE (dest) == STRICT_LOW_PART)
7782	  dest = SUBREG_REG (XEXP (dest, 0));
7783
7784	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
7785	  /* Registers must also be inserted into chains for quantities.  */
7786	  if (insert_regs (dest, sets[i].src_elt, 1))
7787	    {
7788	      /* If `insert_regs' changes something, the hash code must be
7789		 recalculated.  */
7790	      rehash_using_reg (dest);
7791	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
7792	    }
7793
7794	if (GET_CODE (inner_dest) == MEM
7795	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
7796	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
7797	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
7798	     Consider the case in which the address of the MEM is
7799	     passed to a function, which alters the MEM.  Then, if we
7800	     later use Y instead of the MEM we'll miss the update.  */
7801	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
7802	else
7803	  elt = insert (dest, sets[i].src_elt,
7804			sets[i].dest_hash, GET_MODE (dest));
7805
7806	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
7807			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
7808			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
7809							  0))));
7810
7811	if (elt->in_memory)
7812	  {
7813	    /* This implicitly assumes a whole struct
7814	       need not have MEM_IN_STRUCT_P.
7815	       But a whole struct is *supposed* to have MEM_IN_STRUCT_P.  */
7816	    elt->in_struct = (MEM_IN_STRUCT_P (sets[i].inner_dest)
7817			      || sets[i].inner_dest != SET_DEST (sets[i].rtl));
7818	  }
7819
7820	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
7821	   narrower than M2, and both M1 and M2 are the same number of words,
7822	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
7823	   make that equivalence as well.
7824
7825	   However, BAR may have equivalences for which gen_lowpart_if_possible
7826	   will produce a simpler value than gen_lowpart_if_possible applied to
7827	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
7828	   BAR's equivalences.  If we don't get a simplified form, make
7829	   the SUBREG.  It will not be used in an equivalence, but will
7830	   cause two similar assignments to be detected.
7831
7832	   Note the loop below will find SUBREG_REG (DEST) since we have
7833	   already entered SRC and DEST of the SET in the table.  */
7834
7835	if (GET_CODE (dest) == SUBREG
7836	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
7837		 / UNITS_PER_WORD)
		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
7839	    && (GET_MODE_SIZE (GET_MODE (dest))
7840		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
7841	    && sets[i].src_elt != 0)
7842	  {
7843	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
7844	    struct table_elt *elt, *classp = 0;
7845
7846	    for (elt = sets[i].src_elt->first_same_value; elt;
7847		 elt = elt->next_same_value)
7848	      {
7849		rtx new_src = 0;
7850		unsigned src_hash;
7851		struct table_elt *src_elt;
7852
7853		/* Ignore invalid entries.  */
7854		if (GET_CODE (elt->exp) != REG
7855		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
7856		  continue;
7857
7858		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
7859		if (new_src == 0)
7860		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
7861
7862		src_hash = HASH (new_src, new_mode);
7863		src_elt = lookup (new_src, src_hash, new_mode);
7864
		/* Put the new source in the hash table if it isn't
		   already there.  */
7867		if (src_elt == 0)
7868		  {
7869		    if (insert_regs (new_src, classp, 0))
7870		      {
7871			rehash_using_reg (new_src);
7872			src_hash = HASH (new_src, new_mode);
7873		      }
7874		    src_elt = insert (new_src, classp, src_hash, new_mode);
7875		    src_elt->in_memory = elt->in_memory;
7876		    src_elt->in_struct = elt->in_struct;
7877		  }
7878		else if (classp && classp != src_elt->first_same_value)
7879		  /* Show that two things that we've seen before are
7880		     actually the same.  */
7881		  merge_equiv_classes (src_elt, classp);
7882
7883		classp = src_elt->first_same_value;
7884		/* Ignore invalid entries.  */
7885		while (classp
7886		       && GET_CODE (classp->exp) != REG
7887		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
7888		  classp = classp->next_same_value;
7889	      }
7890	  }
7891      }
7892
7893  /* Special handling for (set REG0 REG1)
7894     where REG0 is the "cheapest", cheaper than REG1.
7895     After cse, REG1 will probably not be used in the sequel,
7896     so (if easily done) change this insn to (set REG1 REG0) and
7897     replace REG1 with REG0 in the previous insn that computed their value.
7898     Then REG1 will become a dead store and won't cloud the situation
7899     for later optimizations.
7900
7901     Do not make this change if REG1 is a hard register, because it will
7902     then be used in the sequel and we may be changing a two-operand insn
7903     into a three-operand insn.
7904
7905     Also do not do this if we are operating on a copy of INSN.
7906
7907     Also don't do this if INSN ends a libcall; this would cause an unrelated
7908     register to be set in the middle of a libcall, and we then get bad code
7909     if the libcall is deleted.  */
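  /* Schematically, with hypothetical pseudos:

	(set (reg 101) (expr))		  (set (reg 100) (expr))
	(set (reg 100) (reg 101))    =>	  (set (reg 101) (reg 100))

     where the second insn is now a likely dead store.  */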
7910
7911  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
7912      && NEXT_INSN (PREV_INSN (insn)) == insn
7913      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
7914      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
7915      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl)))
7916      && (qty_first_reg[REG_QTY (REGNO (SET_SRC (sets[0].rtl)))]
7917	  == REGNO (SET_DEST (sets[0].rtl)))
7918      && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
7919    {
7920      rtx prev = PREV_INSN (insn);
7921      while (prev && GET_CODE (prev) == NOTE)
7922	prev = PREV_INSN (prev);
7923
7924      if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
7925	  && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
7926	{
7927	  rtx dest = SET_DEST (sets[0].rtl);
7928	  rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
7929
7930	  validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
7931	  validate_change (insn, & SET_DEST (sets[0].rtl),
7932			   SET_SRC (sets[0].rtl), 1);
7933	  validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
7934	  apply_change_group ();
7935
7936	  /* If REG1 was equivalent to a constant, REG0 is not.  */
7937	  if (note)
7938	    PUT_REG_NOTE_KIND (note, REG_EQUAL);
7939
7940	  /* If there was a REG_WAS_0 note on PREV, remove it.  Move
7941	     any REG_WAS_0 note on INSN to PREV.  */
7942	  note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
7943	  if (note)
7944	    remove_note (prev, note);
7945
7946	  note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
7947	  if (note)
7948	    {
7949	      remove_note (insn, note);
7950	      XEXP (note, 1) = REG_NOTES (prev);
7951	      REG_NOTES (prev) = note;
7952	    }
7953
7954	  /* If INSN has a REG_EQUAL note, and this note mentions REG0,
7955	     then we must delete it, because the value in REG0 has changed.  */
7956	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7957	  if (note && reg_mentioned_p (dest, XEXP (note, 0)))
7958	    remove_note (insn, note);
7959	}
7960    }
7961
7962  /* If this is a conditional jump insn, record any known equivalences due to
7963     the condition being tested.  */
7964
7965  last_jump_equiv_class = 0;
7966  if (GET_CODE (insn) == JUMP_INSN
7967      && n_sets == 1 && GET_CODE (x) == SET
7968      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
7969    record_jump_equiv (insn, 0);
7970
7971#ifdef HAVE_cc0
7972  /* If the previous insn set CC0 and this insn no longer references CC0,
7973     delete the previous insn.  Here we use the fact that nothing expects CC0
7974     to be valid over an insn, which is true until the final pass.  */
7975  if (prev_insn && GET_CODE (prev_insn) == INSN
7976      && (tem = single_set (prev_insn)) != 0
7977      && SET_DEST (tem) == cc0_rtx
7978      && ! reg_mentioned_p (cc0_rtx, x))
7979    {
7980      PUT_CODE (prev_insn, NOTE);
7981      NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
7982      NOTE_SOURCE_FILE (prev_insn) = 0;
7983    }
7984
7985  prev_insn_cc0 = this_insn_cc0;
7986  prev_insn_cc0_mode = this_insn_cc0_mode;
7987#endif
7988
7989  prev_insn = insn;
7990}
7991
7992/* Remove from the hash table all expressions that reference memory.  */
7993static void
7994invalidate_memory ()
7995{
7996  register int i;
7997  register struct table_elt *p, *next;
7998
7999  for (i = 0; i < NBUCKETS; i++)
8000    for (p = table[i]; p; p = next)
8001      {
8002	next = p->next_same_hash;
8003	if (p->in_memory)
8004	  remove_from_table (p, i);
8005      }
8006}
8007
8008/* XXX ??? The name of this function bears little resemblance to
8009   what this function actually does.  FIXME.  */
8010static int
8011note_mem_written (addr)
8012     register rtx addr;
8013{
8014  /* Pushing or popping the stack invalidates just the stack pointer.  */
8015  if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
8016       || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
8017      && GET_CODE (XEXP (addr, 0)) == REG
8018      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
8019    {
8020      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
8021	REG_TICK (STACK_POINTER_REGNUM)++;
8022
8023      /* This should be *very* rare.  */
8024      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
8025	invalidate (stack_pointer_rtx, VOIDmode);
8026      return 1;
8027    }
8028  return 0;
8029}
8030
8031/* Perform invalidation on the basis of everything about an insn
8032   except for invalidating the actual places that are SET in it.
8033   This includes the places CLOBBERed, and anything that might
8034   alias with something that is SET or CLOBBERed.
8035
8036   X is the pattern of the insn.  */
8037
8038static void
8039invalidate_from_clobbers (x)
8040     rtx x;
8041{
8042  if (GET_CODE (x) == CLOBBER)
8043    {
8044      rtx ref = XEXP (x, 0);
8045      if (ref)
8046	{
8047	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8048	      || GET_CODE (ref) == MEM)
8049	    invalidate (ref, VOIDmode);
8050	  else if (GET_CODE (ref) == STRICT_LOW_PART
8051		   || GET_CODE (ref) == ZERO_EXTRACT)
8052	    invalidate (XEXP (ref, 0), GET_MODE (ref));
8053	}
8054    }
8055  else if (GET_CODE (x) == PARALLEL)
8056    {
8057      register int i;
8058      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8059	{
8060	  register rtx y = XVECEXP (x, 0, i);
8061	  if (GET_CODE (y) == CLOBBER)
8062	    {
8063	      rtx ref = XEXP (y, 0);
8064	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
8065		  || GET_CODE (ref) == MEM)
8066		invalidate (ref, VOIDmode);
8067	      else if (GET_CODE (ref) == STRICT_LOW_PART
8068		       || GET_CODE (ref) == ZERO_EXTRACT)
8069		invalidate (XEXP (ref, 0), GET_MODE (ref));
8070	    }
8071	}
8072    }
8073}
8074
8075/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
8076   and replace any registers in them with either an equivalent constant
8077   or the canonical form of the register.  If we are inside an address,
8078   only do this if the address remains valid.
8079
8080   OBJECT is 0 except when within a MEM in which case it is the MEM.
8081
8082   Return the replacement for X.  */
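/* For example, in a REG_EQUAL note of (plus:SI (reg:SI 100) (const_int 4)),
   register 100 is replaced by (const_int 8) if that is its known constant
   value, and by its canonical equivalent register otherwise.
   (Hypothetical operands.)  */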
8083
8084static rtx
8085cse_process_notes (x, object)
8086     rtx x;
8087     rtx object;
8088{
8089  enum rtx_code code = GET_CODE (x);
8090  char *fmt = GET_RTX_FORMAT (code);
8091  int i;
8092
8093  switch (code)
8094    {
8095    case CONST_INT:
8096    case CONST:
8097    case SYMBOL_REF:
8098    case LABEL_REF:
8099    case CONST_DOUBLE:
8100    case PC:
8101    case CC0:
8102    case LO_SUM:
8103      return x;
8104
8105    case MEM:
8106      XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
8107      return x;
8108
8109    case EXPR_LIST:
8110    case INSN_LIST:
8111      if (REG_NOTE_KIND (x) == REG_EQUAL)
8112	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
8113      if (XEXP (x, 1))
8114	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
8115      return x;
8116
8117    case SIGN_EXTEND:
8118    case ZERO_EXTEND:
8119    case SUBREG:
8120      {
8121	rtx new = cse_process_notes (XEXP (x, 0), object);
8122	/* We don't substitute VOIDmode constants into these rtx,
8123	   since they would impede folding.  */
8124	if (GET_MODE (new) != VOIDmode)
8125	  validate_change (object, &XEXP (x, 0), new, 0);
8126	return x;
8127      }
8128
8129    case REG:
8130      i = REG_QTY (REGNO (x));
8131
8132      /* Return a constant or a constant register.  */
8133      if (REGNO_QTY_VALID_P (REGNO (x))
8134	  && qty_const[i] != 0
8135	  && (CONSTANT_P (qty_const[i])
8136	      || GET_CODE (qty_const[i]) == REG))
8137	{
8138	  rtx new = gen_lowpart_if_possible (GET_MODE (x), qty_const[i]);
8139	  if (new)
8140	    return new;
8141	}
8142
8143      /* Otherwise, canonicalize this register.  */
8144      return canon_reg (x, NULL_RTX);
8145
8146    default:
8147      break;
8148    }
8149
8150  for (i = 0; i < GET_RTX_LENGTH (code); i++)
8151    if (fmt[i] == 'e')
8152      validate_change (object, &XEXP (x, i),
8153		       cse_process_notes (XEXP (x, i), object), 0);
8154
8155  return x;
8156}
8157
8158/* Find common subexpressions between the end test of a loop and the beginning
8159   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
8160
8161   Often we have a loop where an expression in the exit test is used
8162   in the body of the loop.  For example "while (*p) *q++ = *p++;".
8163   Because of the way we duplicate the loop exit test in front of the loop,
8164   however, we don't detect that common subexpression.  This will be caught
8165   when global cse is implemented, but this is a quite common case.
8166
8167   This function handles the most common cases of these common expressions.
8168   It is called after we have processed the basic block ending with the
8169   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
8170   jumps to a label used only once.  */
8171
8172static void
8173cse_around_loop (loop_start)
8174     rtx loop_start;
8175{
8176  rtx insn;
8177  int i;
8178  struct table_elt *p;
8179
8180  /* If the jump at the end of the loop doesn't go to the start, we don't
8181     do anything.  */
8182  for (insn = PREV_INSN (loop_start);
8183       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
8184       insn = PREV_INSN (insn))
8185    ;
8186
8187  if (insn == 0
8188      || GET_CODE (insn) != NOTE
8189      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
8190    return;
8191
8192  /* If the last insn of the loop (the end test) was an NE comparison,
8193     we will interpret it as an EQ comparison, since we fell through
8194     the loop.  Any equivalences resulting from that comparison are
8195     therefore not valid and must be invalidated.  */
8196  if (last_jump_equiv_class)
8197    for (p = last_jump_equiv_class->first_same_value; p;
8198	 p = p->next_same_value)
8199      {
8200        if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
8201	    || (GET_CODE (p->exp) == SUBREG
8202	        && GET_CODE (SUBREG_REG (p->exp)) == REG))
8203	  invalidate (p->exp, VOIDmode);
8204        else if (GET_CODE (p->exp) == STRICT_LOW_PART
8205	         || GET_CODE (p->exp) == ZERO_EXTRACT)
8206	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
8207      }
8208
8209  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
8210     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
8211
8212     The only thing we do with SET_DEST is invalidate entries, so we
8213     can safely process each SET in order.  It is slightly less efficient
8214     to do so, but we only want to handle the most common cases.
8215
8216     The gen_move_insn call in cse_set_around_loop may create new pseudos.
8217     These pseudos won't have valid entries in any of the tables indexed
8218     by register number, such as reg_qty.  We avoid out-of-range array
8219     accesses by not processing any instructions created after cse started.  */
8220
8221  for (insn = NEXT_INSN (loop_start);
8222       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
8223       && INSN_UID (insn) < max_insn_uid
8224       && ! (GET_CODE (insn) == NOTE
8225	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
8226       insn = NEXT_INSN (insn))
8227    {
8228      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8229	  && (GET_CODE (PATTERN (insn)) == SET
8230	      || GET_CODE (PATTERN (insn)) == CLOBBER))
8231	cse_set_around_loop (PATTERN (insn), insn, loop_start);
8232      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8233	       && GET_CODE (PATTERN (insn)) == PARALLEL)
8234	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
8235	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
8236	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
8237	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
8238				 loop_start);
8239    }
8240}
8241
8242/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
8243   since they are done elsewhere.  This function is called via note_stores.  */
8244
8245static void
invalidate_skipped_set (dest, set)
     rtx dest;
     rtx set;
8249{
8250  enum rtx_code code = GET_CODE (dest);
8251
8252  if (code == MEM
8253      && ! note_mem_written (dest)	/* If this is not a stack push ... */
8254      /* There are times when an address can appear varying and be a PLUS
8255	 during this scan when it would be a fixed address were we to know
8256	 the proper equivalences.  So invalidate all memory if there is
8257	 a BLKmode or nonscalar memory reference or a reference to a
8258	 variable address.  */
8259      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
8260	  || cse_rtx_varies_p (XEXP (dest, 0))))
8261    {
8262      invalidate_memory ();
8263      return;
8264    }
8265
8266  if (GET_CODE (set) == CLOBBER
8267#ifdef HAVE_cc0
8268      || dest == cc0_rtx
8269#endif
8270      || dest == pc_rtx)
8271    return;
8272
8273  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
8274    invalidate (XEXP (dest, 0), GET_MODE (dest));
8275  else if (code == REG || code == SUBREG || code == MEM)
8276    invalidate (dest, VOIDmode);
8277}
8278
/* Invalidate everything stored by the insns from START up to the end of
   the function or the next label.  This is called when we wish to CSE
   around a block that is conditionally executed.  */
8282
8283static void
8284invalidate_skipped_block (start)
8285     rtx start;
8286{
8287  rtx insn;
8288
8289  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
8290       insn = NEXT_INSN (insn))
8291    {
8292      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8293	continue;
8294
8295      if (GET_CODE (insn) == CALL_INSN)
8296	{
8297	  if (! CONST_CALL_P (insn))
8298	    invalidate_memory ();
8299	  invalidate_for_call ();
8300	}
8301
8302      invalidate_from_clobbers (PATTERN (insn));
8303      note_stores (PATTERN (insn), invalidate_skipped_set);
8304    }
8305}
8306
8307/* Used for communication between the following two routines; contains a
8308   value to be checked for modification.  */
8309
8310static rtx cse_check_loop_start_value;
8311
8312/* If modifying X will modify the value in CSE_CHECK_LOOP_START_VALUE,
8313   indicate that fact by setting CSE_CHECK_LOOP_START_VALUE to 0.  */
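
/* For example (illustrative): if CSE_CHECK_LOOP_START_VALUE is
   (mem:SI (reg:SI 100)), then any store to memory, and any store to a
   register overlapping reg 100, clears it to 0.  */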
8314
8315static void
8316cse_check_loop_start (x, set)
8317     rtx x;
8318     rtx set ATTRIBUTE_UNUSED;
8319{
8320  if (cse_check_loop_start_value == 0
8321      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
8322    return;
8323
8324  if ((GET_CODE (x) == MEM && GET_CODE (cse_check_loop_start_value) == MEM)
8325      || reg_overlap_mentioned_p (x, cse_check_loop_start_value))
8326    cse_check_loop_start_value = 0;
8327}
8328
8329/* X is a SET or CLOBBER contained in INSN that was found near the start of
8330   a loop that starts with the label at LOOP_START.
8331
8332   If X is a SET, we see if its SET_SRC is currently in our hash table.
8333   If so, we see if it has a value equal to some register used only in the
8334   loop exit code (as marked by jump.c).
8335
8336   If those two conditions are true, we search backwards from the start of
8337   the loop to see if that same value was loaded into a register that still
8338   retains its value at the start of the loop.
8339
8340   If so, we insert an insn after the load to copy the destination of that
8341   load into the equivalent register and (try to) replace our SET_SRC with that
8342   register.
8343
8344   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
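
/* As an illustrative (hypothetical) example: suppose insn P before the
   loop does

	(set (reg:SI 70) (mem:SI (reg:SI 100)))

   and inside the loop X is

	(set (reg:SI 71) (mem:SI (reg:SI 100)))

   where the hash table also equates (mem:SI (reg:SI 100)) with
   (reg:SI 80), a register used only in the loop exit test.  If nothing
   between P and LOOP_START modifies anything referenced by that SET_SRC,
   we emit

	(set (reg:SI 80) (reg:SI 70))

   after P and replace the SET_SRC of X with (reg:SI 80).  */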
8345
8346static void
8347cse_set_around_loop (x, insn, loop_start)
8348     rtx x;
8349     rtx insn;
8350     rtx loop_start;
8351{
8352  struct table_elt *src_elt;
8353
8354  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
8355     are setting PC or CC0 or whose SET_SRC is already a register.  */
8356  if (GET_CODE (x) == SET
8357      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
8358      && GET_CODE (SET_SRC (x)) != REG)
8359    {
8360      src_elt = lookup (SET_SRC (x),
8361			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
8362			GET_MODE (SET_DEST (x)));
8363
8364      if (src_elt)
8365	for (src_elt = src_elt->first_same_value; src_elt;
8366	     src_elt = src_elt->next_same_value)
8367	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
8368	      && COST (src_elt->exp) < COST (SET_SRC (x)))
8369	    {
8370	      rtx p, set;
8371
8372	      /* Look for an insn in front of LOOP_START that sets
8373		 something in the desired mode to SET_SRC (x) before we hit
8374		 a label or CALL_INSN.  */
8375
8376	      for (p = prev_nonnote_insn (loop_start);
8377		   p && GET_CODE (p) != CALL_INSN
8378		   && GET_CODE (p) != CODE_LABEL;
		   p = prev_nonnote_insn (p))
8380		if ((set = single_set (p)) != 0
8381		    && GET_CODE (SET_DEST (set)) == REG
8382		    && GET_MODE (SET_DEST (set)) == src_elt->mode
8383		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
8384		  {
8385		    /* We now have to ensure that nothing between P
8386		       and LOOP_START modified anything referenced in
8387		       SET_SRC (x).  We know that nothing within the loop
8388		       can modify it, or we would have invalidated it in
8389		       the hash table.  */
8390		    rtx q;
8391
8392		    cse_check_loop_start_value = SET_SRC (x);
8393		    for (q = p; q != loop_start; q = NEXT_INSN (q))
8394		      if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
8395			note_stores (PATTERN (q), cse_check_loop_start);
8396
8397		    /* If nothing was changed and we can replace our
8398		       SET_SRC, add an insn after P to copy its destination
8399		       to what we will be replacing SET_SRC with.  */
8400		    if (cse_check_loop_start_value
8401			&& validate_change (insn, &SET_SRC (x),
8402					    src_elt->exp, 0))
8403		      {
			/* If this creates new pseudos, it is unsafe,
			   because the regno of a new pseudo cannot be
			   used to index into reg_qty when cse_insn
			   processes the new insn.  Therefore, if a new
			   pseudo was created, discard this optimization.  */
8409			int nregs = max_reg_num ();
8410			rtx move
8411			  = gen_move_insn (src_elt->exp, SET_DEST (set));
8412			if (nregs != max_reg_num ())
8413			  {
8414			    if (! validate_change (insn, &SET_SRC (x),
8415						   SET_SRC (set), 0))
8416			      abort ();
8417			  }
8418			else
8419			  emit_insn_after (move, p);
8420		      }
8421		    break;
8422		  }
8423	    }
8424    }
8425
8426  /* Now invalidate anything modified by X.  */
8427  note_mem_written (SET_DEST (x));
8428
8429  /* See comment on similar code in cse_insn for explanation of these tests.  */
8430  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
8431      || GET_CODE (SET_DEST (x)) == MEM)
8432    invalidate (SET_DEST (x), VOIDmode);
8433  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
8434	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
8435    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
8436}
8437
8438/* Find the end of INSN's basic block and return its range,
8439   the total number of SETs in all the insns of the block, the last insn of the
8440   block, and the branch path.
8441
8442   The branch path indicates which branches should be followed.  If a non-zero
8443   path size is specified, the block should be rescanned and a different set
8444   of branches will be taken.  The branch path is only used if
8445   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
8446
8447   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
8448   used to describe the block.  It is filled in with the information about
8449   the current block.  The incoming structure's branch path, if any, is used
8450   to construct the output branch path.  */
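
/* For instance (illustrative): after a first scan that followed one
   conditional branch, the incoming path might be a single entry

	data->path[0].branch = <that JUMP_INSN>, .status = TAKEN

   with data->path_size == 1; the update loop below then flips that entry
   to NOT_TAKEN so that the fall-through alternative is explored on the
   rescan.  */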
8451
8452void
8453cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
8454     rtx insn;
8455     struct cse_basic_block_data *data;
8456     int follow_jumps;
8457     int after_loop;
8458     int skip_blocks;
8459{
8460  rtx p = insn, q;
8461  int nsets = 0;
8462  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
8463  rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
8464  int path_size = data->path_size;
8465  int path_entry = 0;
8466  int i;
8467
8468  /* Update the previous branch path, if any.  If the last branch was
8469     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
8470     shorten the path by one and look at the previous branch.  We know that
8471     at least one branch must have been taken if PATH_SIZE is non-zero.  */
8472  while (path_size > 0)
8473    {
8474      if (data->path[path_size - 1].status != NOT_TAKEN)
8475	{
8476	  data->path[path_size - 1].status = NOT_TAKEN;
8477	  break;
8478	}
8479      else
8480	path_size--;
8481    }
8482
8483  /* Scan to end of this basic block.  */
8484  while (p && GET_CODE (p) != CODE_LABEL)
8485    {
8486      /* Don't cse out the end of a loop.  This makes a difference
8487	 only for the unusual loops that always execute at least once;
8488	 all other loops have labels there so we will stop in any case.
8489	 Cse'ing out the end of the loop is dangerous because it
8490	 might cause an invariant expression inside the loop
8491	 to be reused after the end of the loop.  This would make it
8492	 hard to move the expression out of the loop in loop.c,
8493	 especially if it is one of several equivalent expressions
8494	 and loop.c would like to eliminate it.
8495
8496	 If we are running after loop.c has finished, we can ignore
8497	 the NOTE_INSN_LOOP_END.  */
8498
8499      if (! after_loop && GET_CODE (p) == NOTE
8500	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
8501	break;
8502
      /* Don't cse over a call to setjmp; on some machines (e.g. VAX)
8504	 the regs restored by the longjmp come from
8505	 a later time than the setjmp.  */
8506      if (GET_CODE (p) == NOTE
8507	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
8508	break;
8509
8510      /* A PARALLEL can have lots of SETs in it,
8511	 especially if it is really an ASM_OPERANDS.  */
8512      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
8513	  && GET_CODE (PATTERN (p)) == PARALLEL)
8514	nsets += XVECLEN (PATTERN (p), 0);
8515      else if (GET_CODE (p) != NOTE)
8516	nsets += 1;
8517
8518      /* Ignore insns made by CSE; they cannot affect the boundaries of
8519	 the basic block.  */
8520
8521      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
8522	high_cuid = INSN_CUID (p);
8523      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
8524	low_cuid = INSN_CUID (p);
8525
8526      /* See if this insn is in our branch path.  If it is and we are to
8527	 take it, do so.  */
8528      if (path_entry < path_size && data->path[path_entry].branch == p)
8529	{
8530	  if (data->path[path_entry].status != NOT_TAKEN)
8531	    p = JUMP_LABEL (p);
8532
8533	  /* Point to next entry in path, if any.  */
8534	  path_entry++;
8535	}
8536
8537      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
8538	 was specified, we haven't reached our maximum path length, there are
8539	 insns following the target of the jump, this is the only use of the
8540	 jump label, and the target label is preceded by a BARRIER.
8541
8542	 Alternatively, we can follow the jump if it branches around a
8543	 block of code and there are no other branches into the block.
8544	 In this case invalidate_skipped_block will be called to invalidate any
8545	 registers set in the block when following the jump.  */
8546
8547      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
8548	       && GET_CODE (p) == JUMP_INSN
8549      	       && GET_CODE (PATTERN (p)) == SET
8550	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
8551	       && JUMP_LABEL (p) != 0
8552	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
8553	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
8554	{
8555	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
8556	    if ((GET_CODE (q) != NOTE
8557	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
8558	         || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
8559	        && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
8560	      break;
8561
8562	  /* If we ran into a BARRIER, this code is an extension of the
8563	     basic block when the branch is taken.  */
8564	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
8565	    {
	      /* Don't allow ourselves to keep walking around an
		 always-executed loop.  */
8568	      if (next_real_insn (q) == next)
8569		{
8570		  p = NEXT_INSN (p);
8571		  continue;
8572		}
8573
8574	      /* Similarly, don't put a branch in our path more than once.  */
8575	      for (i = 0; i < path_entry; i++)
8576		if (data->path[i].branch == p)
8577		  break;
8578
8579	      if (i != path_entry)
8580		break;
8581
8582	      data->path[path_entry].branch = p;
8583	      data->path[path_entry++].status = TAKEN;
8584
8585	      /* This branch now ends our path.  It was possible that we
8586		 didn't see this branch the last time around (when the
8587		 insn in front of the target was a JUMP_INSN that was
8588		 turned into a no-op).  */
8589	      path_size = path_entry;
8590
8591	      p = JUMP_LABEL (p);
8592	      /* Mark block so we won't scan it again later.  */
8593	      PUT_MODE (NEXT_INSN (p), QImode);
8594	    }
8595	  /* Detect a branch around a block of code.  */
8596	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
8597	    {
8598	      register rtx tmp;
8599
8600	      if (next_real_insn (q) == next)
8601		{
8602		  p = NEXT_INSN (p);
8603		  continue;
8604		}
8605
8606	      for (i = 0; i < path_entry; i++)
8607		if (data->path[i].branch == p)
8608		  break;
8609
8610	      if (i != path_entry)
8611		break;
8612
8613	      /* This is no_labels_between_p (p, q) with an added check for
8614		 reaching the end of a function (in case Q precedes P).  */
8615	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
8616		if (GET_CODE (tmp) == CODE_LABEL)
8617		  break;
8618
8619	      if (tmp == q)
8620		{
8621		  data->path[path_entry].branch = p;
8622		  data->path[path_entry++].status = AROUND;
8623
8624		  path_size = path_entry;
8625
8626		  p = JUMP_LABEL (p);
8627		  /* Mark block so we won't scan it again later.  */
8628		  PUT_MODE (NEXT_INSN (p), QImode);
8629		}
8630	    }
8631	}
8632      p = NEXT_INSN (p);
8633    }
8634
8635  data->low_cuid = low_cuid;
8636  data->high_cuid = high_cuid;
8637  data->nsets = nsets;
8638  data->last = p;
8639
  /* If none of the jumps in the path were taken, set our path length to
     zero so that the block won't be rescanned.  */
8642  for (i = path_size - 1; i >= 0; i--)
8643    if (data->path[i].status != NOT_TAKEN)
8644      break;
8645
8646  if (i == -1)
8647    data->path_size = 0;
8648  else
8649    data->path_size = path_size;
8650
8651  /* End the current branch path.  */
8652  data->path[path_size].branch = 0;
8653}
8654
8655/* Perform cse on the instructions of a function.
8656   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.
8658
8659   AFTER_LOOP is 1 if this is the cse call done after loop optimization
8660   (only if -frerun-cse-after-loop).
8661
8662   Returns 1 if jump_optimize should be redone due to simplifications
8663   in conditional jump instructions.  */
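
/* For illustration, a caller runs this pass roughly as follows (only
   cse_main, get_insns and max_reg_num are actual functions; the other
   names are hypothetical):

	int altered = cse_main (get_insns (), max_reg_num (), 0, dump_file);

   rerunning the jump optimizer afterwards if ALTERED is non-zero.  */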
8664
8665int
8666cse_main (f, nregs, after_loop, file)
8667     rtx f;
8668     int nregs;
8669     int after_loop;
8670     FILE *file;
8671{
8672  struct cse_basic_block_data val;
8673  register rtx insn = f;
8674  register int i;
8675
8676  cse_jumps_altered = 0;
8677  recorded_label_ref = 0;
8678  constant_pool_entries_cost = 0;
8679  val.path_size = 0;
8680
8681  init_recog ();
8682  init_alias_analysis ();
8683
8684  max_reg = nregs;
8685
8686  max_insn_uid = get_max_uid ();
8687
8688  reg_next_eqv = (int *) alloca (nregs * sizeof (int));
8689  reg_prev_eqv = (int *) alloca (nregs * sizeof (int));
8690
8691#ifdef LOAD_EXTEND_OP
8692
8693  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
8694     and change the code and mode as appropriate.  */
8695  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
8696#endif
8697
  /* Discard all the free elements left over from the previous function,
     since they were allocated in the temporary obstack.  */
8700  bzero ((char *) table, sizeof table);
8701  free_element_chain = 0;
8702  n_elements_made = 0;
8703
8704  /* Find the largest uid.  */
8705
8706  max_uid = get_max_uid ();
8707  uid_cuid = (int *) alloca ((max_uid + 1) * sizeof (int));
8708  bzero ((char *) uid_cuid, (max_uid + 1) * sizeof (int));
8709
8710  /* Compute the mapping from uids to cuids.
8711     CUIDs are numbers assigned to insns, like uids,
8712     except that cuids increase monotonically through the code.
8713     Don't assign cuids to line-number NOTEs, so that the distance in cuids
8714     between two insns is not affected by -g.  */
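
  /* For instance (illustrative uids): the sequence

	insn (uid 40), line-number note (uid 41), insn (uid 42)

     receives cuids K, K and K+1, so adding or deleting line-number notes
     never changes the cuid distance between the two insns.  */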
8715
8716  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
8717    {
8718      if (GET_CODE (insn) != NOTE
8719	  || NOTE_LINE_NUMBER (insn) < 0)
8720	INSN_CUID (insn) = ++i;
8721      else
8722	/* Give a line number note the same cuid as preceding insn.  */
8723	INSN_CUID (insn) = i;
8724    }
8725
8726  /* Initialize which registers are clobbered by calls.  */
8727
8728  CLEAR_HARD_REG_SET (regs_invalidated_by_call);
8729
8730  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8731    if ((call_used_regs[i]
8732	 /* Used to check !fixed_regs[i] here, but that isn't safe;
8733	    fixed regs are still call-clobbered, and sched can get
8734	    confused if they can "live across calls".
8735
	    The frame pointer is always preserved across calls.  The arg
	    pointer is preserved if it is fixed.  The stack pointer usually
	    is, unless RETURN_POPS_ARGS, in which case an explicit CLOBBER
8739	    will be present.  If we are generating PIC code, the PIC offset
8740	    table register is preserved across calls.  */
8741
8742	 && i != STACK_POINTER_REGNUM
8743	 && i != FRAME_POINTER_REGNUM
8744#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
8745	 && i != HARD_FRAME_POINTER_REGNUM
8746#endif
8747#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
8748	 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
8749#endif
8750#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
8751	 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
8752#endif
8753	 )
8754	|| global_regs[i])
8755      SET_HARD_REG_BIT (regs_invalidated_by_call, i);
8756
8757  /* Loop over basic blocks.
8758     Compute the maximum number of qty's needed for each basic block
8759     (which is 2 for each SET).  */
8760  insn = f;
8761  while (insn)
8762    {
8763      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
8764			      flag_cse_skip_blocks);
8765
8766      /* If this basic block was already processed or has no sets, skip it.  */
8767      if (val.nsets == 0 || GET_MODE (insn) == QImode)
8768	{
8769	  PUT_MODE (insn, VOIDmode);
8770	  insn = (val.last ? NEXT_INSN (val.last) : 0);
8771	  val.path_size = 0;
8772	  continue;
8773	}
8774
8775      cse_basic_block_start = val.low_cuid;
8776      cse_basic_block_end = val.high_cuid;
8777      max_qty = val.nsets * 2;
8778
8779      if (file)
8780	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
8781		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
8782		 val.nsets);
8783
8784      /* Make MAX_QTY bigger to give us room to optimize
8785	 past the end of this basic block, if that should prove useful.  */
8786      if (max_qty < 500)
8787	max_qty = 500;
8788
8789      max_qty += max_reg;
8790
8791      /* If this basic block is being extended by following certain jumps,
8792         (see `cse_end_of_basic_block'), we reprocess the code from the start.
8793         Otherwise, we start after this basic block.  */
8794      if (val.path_size > 0)
8795        cse_basic_block (insn, val.last, val.path, 0);
8796      else
8797	{
8798	  int old_cse_jumps_altered = cse_jumps_altered;
8799	  rtx temp;
8800
8801	  /* When cse changes a conditional jump to an unconditional
8802	     jump, we want to reprocess the block, since it will give
8803	     us a new branch path to investigate.  */
8804	  cse_jumps_altered = 0;
8805	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
8806	  if (cse_jumps_altered == 0
8807	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
8808	    insn = temp;
8809
8810	  cse_jumps_altered |= old_cse_jumps_altered;
8811	}
8812
8813#ifdef USE_C_ALLOCA
8814      alloca (0);
8815#endif
8816    }
8817
8818  /* Tell refers_to_mem_p that qty_const info is not available.  */
8819  qty_const = 0;
8820
8821  if (max_elements_made < n_elements_made)
8822    max_elements_made = n_elements_made;
8823
8824  return cse_jumps_altered || recorded_label_ref;
8825}
8826
/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
8829   a null path when not following jumps.
8830
8831   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
8832   loop.  This is true when we are being called for the last time on a
8833   block and this CSE pass is before loop.c.  */
8834
8835static rtx
8836cse_basic_block (from, to, next_branch, around_loop)
8837     register rtx from, to;
8838     struct branch_path *next_branch;
8839     int around_loop;
8840{
8841  register rtx insn;
8842  int to_usage = 0;
8843  rtx libcall_insn = NULL_RTX;
8844  int num_insns = 0;
8845
8846  /* Each of these arrays is undefined before max_reg, so only allocate
8847     the space actually needed and adjust the start below.  */
8848
8849  qty_first_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8850  qty_last_reg = (int *) alloca ((max_qty - max_reg) * sizeof (int));
  qty_mode = (enum machine_mode *) alloca ((max_qty - max_reg) * sizeof (enum machine_mode));
8852  qty_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8853  qty_const_insn = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8854  qty_comparison_code
8855    = (enum rtx_code *) alloca ((max_qty - max_reg) * sizeof (enum rtx_code));
8856  qty_comparison_qty = (int *) alloca ((max_qty - max_reg) * sizeof (int));
8857  qty_comparison_const = (rtx *) alloca ((max_qty - max_reg) * sizeof (rtx));
8858
8859  qty_first_reg -= max_reg;
8860  qty_last_reg -= max_reg;
8861  qty_mode -= max_reg;
8862  qty_const -= max_reg;
8863  qty_const_insn -= max_reg;
8864  qty_comparison_code -= max_reg;
8865  qty_comparison_qty -= max_reg;
8866  qty_comparison_const -= max_reg;
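
  /* After the adjustments above, each qty_... array is validly indexed
     only by quantity numbers in the range [max_reg, max_qty); e.g.
     qty_const[max_reg] refers to the first element actually allocated.  */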
8867
8868  new_basic_block ();
8869
8870  /* TO might be a label.  If so, protect it from being deleted.  */
8871  if (to != 0 && GET_CODE (to) == CODE_LABEL)
8872    ++LABEL_NUSES (to);
8873
8874  for (insn = from; insn != to; insn = NEXT_INSN (insn))
8875    {
8876      register enum rtx_code code = GET_CODE (insn);
8877
8878      /* If we have processed 1,000 insns, flush the hash table to
8879	 avoid extreme quadratic behavior.  We must not include NOTEs
	 in the count since there may be more of them when generating
	 debugging information.  If we clear the table at different
	 times, code generated with -g -O might be different from code
8883	 generated with -O but not -g.
8884
8885	 ??? This is a real kludge and needs to be done some other way.
8886	 Perhaps for 2.9.  */
8887      if (code != NOTE && num_insns++ > 1000)
8888	{
8889	  flush_hash_table ();
8890	  num_insns = 0;
8891	}
8892
8893      /* See if this is a branch that is part of the path.  If so, and it is
8894	 to be taken, do so.  */
8895      if (next_branch->branch == insn)
8896	{
8897	  enum taken status = next_branch++->status;
8898	  if (status != NOT_TAKEN)
8899	    {
8900	      if (status == TAKEN)
8901		record_jump_equiv (insn, 1);
8902	      else
8903		invalidate_skipped_block (NEXT_INSN (insn));
8904
8905	      /* Set the last insn as the jump insn; it doesn't affect cc0.
8906		 Then follow this branch.  */
8907#ifdef HAVE_cc0
8908	      prev_insn_cc0 = 0;
8909#endif
8910	      prev_insn = insn;
8911	      insn = JUMP_LABEL (insn);
8912	      continue;
8913	    }
8914	}
8915
8916      if (GET_MODE (insn) == QImode)
8917	PUT_MODE (insn, VOIDmode);
8918
8919      if (GET_RTX_CLASS (code) == 'i')
8920	{
8921	  rtx p;
8922
8923	  /* Process notes first so we have all notes in canonical forms when
8924	     looking for duplicate operations.  */
8925
8926	  if (REG_NOTES (insn))
8927	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
8928
	  /* Track when we are inside a LIBCALL block.  Inside such a block,
8930	     we do not want to record destinations.  The last insn of a
8931	     LIBCALL block is not considered to be part of the block, since
8932	     its destination is the result of the block and hence should be
8933	     recorded.  */
8934
8935	  if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
8936	    libcall_insn = XEXP (p, 0);
8937	  else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
8938	    libcall_insn = NULL_RTX;
8939
8940	  cse_insn (insn, libcall_insn);
8941	}
8942
8943      /* If INSN is now an unconditional jump, skip to the end of our
8944	 basic block by pretending that we just did the last insn in the
8945	 basic block.  If we are jumping to the end of our block, show
8946	 that we can have one usage of TO.  */
8947
8948      if (simplejump_p (insn))
8949	{
8950	  if (to == 0)
8951	    return 0;
8952
8953	  if (JUMP_LABEL (insn) == to)
8954	    to_usage = 1;
8955
8956	  /* Maybe TO was deleted because the jump is unconditional.
8957	     If so, there is nothing left in this basic block.  */
8958	  /* ??? Perhaps it would be smarter to set TO
8959	     to whatever follows this insn,
8960	     and pretend the basic block had always ended here.  */
8961	  if (INSN_DELETED_P (to))
8962	    break;
8963
8964	  insn = PREV_INSN (to);
8965	}
8966
8967      /* See if it is ok to keep on going past the label
8968	 which used to end our basic block.  Remember that we incremented
8969	 the count of that label, so we decrement it here.  If we made
8970	 a jump unconditional, TO_USAGE will be one; in that case, we don't
8971	 want to count the use in that jump.  */
8972
8973      if (to != 0 && NEXT_INSN (insn) == to
8974	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
8975	{
8976	  struct cse_basic_block_data val;
8977	  rtx prev;
8978
8979	  insn = NEXT_INSN (to);
8980
8981	  /* If TO was the last insn in the function, we are done.  */
8982	  if (insn == 0)
8983	    return 0;
8984
8985	  /* If TO was preceded by a BARRIER we are done with this block
8986	     because it has no continuation.  */
8987	  prev = prev_nonnote_insn (to);
8988	  if (prev && GET_CODE (prev) == BARRIER)
8989	    return insn;
8990
8991	  /* Find the end of the following block.  Note that we won't be
8992	     following branches in this case.  */
8993	  to_usage = 0;
8994	  val.path_size = 0;
8995	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
8996
8997	  /* If the tables we allocated have enough space left
8998	     to handle all the SETs in the next basic block,
8999	     continue through it.  Otherwise, return,
9000	     and that block will be scanned individually.  */
9001	  if (val.nsets * 2 + next_qty > max_qty)
9002	    break;
9003
9004	  cse_basic_block_start = val.low_cuid;
9005	  cse_basic_block_end = val.high_cuid;
9006	  to = val.last;
9007
9008	  /* Prevent TO from being deleted if it is a label.  */
9009	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
9010	    ++LABEL_NUSES (to);
9011
9012	  /* Back up so we process the first insn in the extension.  */
9013	  insn = PREV_INSN (insn);
9014	}
9015    }
9016
9017  if (next_qty > max_qty)
9018    abort ();
9019
9020  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
9021     the previous insn is the only insn that branches to the head of a loop,
9022     we can cse into the loop.  Don't do this if we changed the jump
9023     structure of a loop unless we aren't going to be following jumps.  */
9024
9025  if ((cse_jumps_altered == 0
9026       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
9027      && around_loop && to != 0
9028      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
9029      && GET_CODE (PREV_INSN (to)) == JUMP_INSN
9030      && JUMP_LABEL (PREV_INSN (to)) != 0
9031      && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
9032    cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
9033
9034  return to ? NEXT_INSN (to) : 0;
9035}
9036
9037/* Count the number of times registers are used (not set) in X.
9038   COUNTS is an array in which we accumulate the count, INCR is how much
9039   we count each register usage.
9040
9041   Don't count a usage of DEST, which is the SET_DEST of a SET which
9042   contains X in its SET_SRC.  This is because such a SET does not
9043   modify the liveness of DEST.  */
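
/* For example (illustrative): when the pattern

	(set (reg:SI 65) (plus:SI (reg:SI 65) (reg:SI 66)))

   is processed, only counts[66] is incremented; the use of reg 65 inside
   the SET_SRC matches DEST and is skipped, since deleting the insn would
   remove that use as well.  */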
9044
9045static void
9046count_reg_usage (x, counts, dest, incr)
9047     rtx x;
9048     int *counts;
9049     rtx dest;
9050     int incr;
9051{
9052  enum rtx_code code;
9053  char *fmt;
9054  int i, j;
9055
9056  if (x == 0)
9057    return;
9058
9059  switch (code = GET_CODE (x))
9060    {
9061    case REG:
9062      if (x != dest)
9063	counts[REGNO (x)] += incr;
9064      return;
9065
9066    case PC:
9067    case CC0:
9068    case CONST:
9069    case CONST_INT:
9070    case CONST_DOUBLE:
9071    case SYMBOL_REF:
9072    case LABEL_REF:
9073      return;
9074
9075    case CLOBBER:
9076      /* If we are clobbering a MEM, mark any registers inside the address
9077         as being used.  */
9078      if (GET_CODE (XEXP (x, 0)) == MEM)
9079	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
9080      return;
9081
9082    case SET:
9083      /* Unless we are setting a REG, count everything in SET_DEST.  */
9084      if (GET_CODE (SET_DEST (x)) != REG)
9085	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
9086
9087      /* If SRC has side-effects, then we can't delete this insn, so the
9088	 usage of SET_DEST inside SRC counts.
9089
9090	 ??? Strictly-speaking, we might be preserving this insn
9091	 because some other SET has side-effects, but that's hard
9092	 to do and can't happen now.  */
9093      count_reg_usage (SET_SRC (x), counts,
9094		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
9095		       incr);
9096      return;
9097
9098    case CALL_INSN:
9099      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
9100
9101      /* ... falls through ...  */
9102    case INSN:
9103    case JUMP_INSN:
9104      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
9105
9106      /* Things used in a REG_EQUAL note aren't dead since loop may try to
9107	 use them.  */
9108
9109      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
9110      return;
9111
9112    case EXPR_LIST:
9113    case INSN_LIST:
9114      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
9116	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
9117      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
9118      return;
9119
9120    default:
9121      break;
9122    }
9123
9124  fmt = GET_RTX_FORMAT (code);
9125  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9126    {
9127      if (fmt[i] == 'e')
9128	count_reg_usage (XEXP (x, i), counts, dest, incr);
9129      else if (fmt[i] == 'E')
9130	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9131	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
9132    }
9133}
9134
9135/* Scan all the insns and delete any that are dead; i.e., they store a register
9136   that is never used or they copy a register to itself.
9137
9138   This is used to remove insns made obviously dead by cse, loop or other
9139   optimizations.  It improves the heuristics in loop since it won't try to
9140   move dead invariants out of loops or make givs for dead quantities.  The
9141   remaining passes of the compilation are also sped up.  */
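
/* For example (illustrative): if

	(set (reg:SI 70) (reg:SI 71))

   survives to this point and counts[70] is zero, the insn is deleted and
   counts[71] is decremented; since the scan runs backwards, this may in
   turn expose the earlier insn that computes reg 71 as dead.  */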
9142
9143void
9144delete_trivially_dead_insns (insns, nreg)
9145     rtx insns;
9146     int nreg;
9147{
9148  int *counts = (int *) alloca (nreg * sizeof (int));
9149  rtx insn, prev;
9150#ifdef HAVE_cc0
9151  rtx tem;
9152#endif
9153  int i;
9154  int in_libcall = 0, dead_libcall = 0;
9155
9156  /* First count the number of times each register is used.  */
9157  bzero ((char *) counts, sizeof (int) * nreg);
9158  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
9159    count_reg_usage (insn, counts, NULL_RTX, 1);
9160
9161  /* Go from the last insn to the first and delete insns that only set unused
9162     registers or copy a register to itself.  As we delete an insn, remove
9163     usage counts for registers it uses.  */
9164  for (insn = prev_real_insn (get_last_insn ()); insn; insn = prev)
9165    {
9166      int live_insn = 0;
9167      rtx note;
9168
9169      prev = prev_real_insn (insn);
9170
9171      /* Don't delete any insns that are part of a libcall block unless
9172	 we can delete the whole libcall block.
9173
9174	 Flow or loop might get confused if we did that.  Remember
9175	 that we are scanning backwards.  */
9176      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
9177	{
9178	  in_libcall = 1;
9179	  live_insn = 1;
9180	  dead_libcall = 0;
9181
9182	  /* See if there's a REG_EQUAL note on this insn and try to
9183	     replace the source with the REG_EQUAL expression.
9184
9185	     We assume that insns with REG_RETVALs can only be reg->reg
9186	     copies at this point.  */
9187	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
9188	  if (note)
9189	    {
9190	      rtx set = single_set (insn);
9191	      if (set
9192		  && validate_change (insn, &SET_SRC (set), XEXP (note, 0), 0))
9193		{
9194		  remove_note (insn,
9195			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
9196		  dead_libcall = 1;
9197		}
9198	    }
9199	}
9200      else if (in_libcall)
9201	live_insn = ! dead_libcall;
9202      else if (GET_CODE (PATTERN (insn)) == SET)
9203	{
9204	  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG
9205	      && SET_DEST (PATTERN (insn)) == SET_SRC (PATTERN (insn)))
9206	    ;
9207
9208#ifdef HAVE_cc0
9209	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
9210		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
9211		   && ((tem = next_nonnote_insn (insn)) == 0
9212		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9213		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9214	    ;
9215#endif
9216	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
9217		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
9218		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
9219		   || side_effects_p (SET_SRC (PATTERN (insn))))
9220	    live_insn = 1;
9221	}
9222      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
9223	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
9224	  {
9225	    rtx elt = XVECEXP (PATTERN (insn), 0, i);
9226
9227	    if (GET_CODE (elt) == SET)
9228	      {
9229		if (GET_CODE (SET_DEST (elt)) == REG
9230		    && SET_DEST (elt) == SET_SRC (elt))
9231		  ;
9232
9233#ifdef HAVE_cc0
9234		else if (GET_CODE (SET_DEST (elt)) == CC0
9235			 && ! side_effects_p (SET_SRC (elt))
9236			 && ((tem = next_nonnote_insn (insn)) == 0
9237			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
9238			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
9239		  ;
9240#endif
9241		else if (GET_CODE (SET_DEST (elt)) != REG
9242			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
9243			 || counts[REGNO (SET_DEST (elt))] != 0
9244			 || side_effects_p (SET_SRC (elt)))
9245		  live_insn = 1;
9246	      }
9247	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
9248	      live_insn = 1;
9249	  }
9250      else
9251	live_insn = 1;
9252
9253      /* If this is a dead insn, delete it and show registers in it aren't
9254	 being used.  */
9255
9256      if (! live_insn)
9257	{
9258	  count_reg_usage (insn, counts, NULL_RTX, -1);
9259	  delete_insn (insn);
9260	}
9261
9262      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
9263	{
9264	  in_libcall = 0;
9265	  dead_libcall = 0;
9266	}
9267    }
9268}
9269