cse.c revision 102780
1/* Common subexpression elimination for GNU compiler.
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22#include "config.h"
23/* stdio.h must precede rtl.h for FFS.  */
24#include "system.h"
25
26#include "rtl.h"
27#include "tm_p.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "flags.h"
32#include "real.h"
33#include "insn-config.h"
34#include "recog.h"
35#include "function.h"
36#include "expr.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40
41/* The basic idea of common subexpression elimination is to go
42   through the code, keeping a record of expressions that would
43   have the same value at the current scan point, and replacing
44   expressions encountered with the cheapest equivalent expression.
45
46   It is too complicated to keep track of the different possibilities
47   when control paths merge in this code; so, at each label, we forget all
48   that is known and start fresh.  This can be described as processing each
49   extended basic block separately.  We have a separate pass to perform
50   global CSE.
51
52   Note CSE can turn a conditional or computed jump into a nop or
53   an unconditional jump.  When this occurs we arrange to run the jump
54   optimizer after CSE to delete the unreachable code.
55
56   We use two data structures to record the equivalent expressions:
57   a hash table for most expressions, and a vector of "quantity
58   numbers" to record equivalent (pseudo) registers.
59
60   The use of the special data structure for registers is desirable
61   because it is faster.  It is possible because registers references
62   contain a fairly small number, the register number, taken from
63   a contiguously allocated series, and two register references are
64   identical if they have the same number.  General expressions
65   do not have any such thing, so the only way to retrieve the
66   information recorded on an expression other than a register
67   is to keep it in a hash table.
68
69Registers and "quantity numbers":
70
71   At the start of each basic block, all of the (hardware and pseudo)
72   registers used in the function are given distinct quantity
73   numbers to indicate their contents.  During scan, when the code
74   copies one register into another, we copy the quantity number.
75   When a register is loaded in any other way, we allocate a new
76   quantity number to describe the value generated by this operation.
77   `reg_qty' records what quantity a register is currently thought
78   of as containing.
79
80   All real quantity numbers are greater than or equal to `max_reg'.
81   If register N has not been assigned a quantity, reg_qty[N] will equal N.
82
83   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
84   entries should be referenced with an index below `max_reg'.
85
86   We also maintain a bidirectional chain of registers for each
87   quantity number.  The `qty_table` members `first_reg' and `last_reg',
88   and `reg_eqv_table' members `next' and `prev' hold these chains.
89
90   The first register in a chain is the one whose lifespan is least local.
91   Among equals, it is the one that was seen first.
92   We replace any equivalent register with that one.
93
94   If two registers have the same quantity number, it must be true that
95   REG expressions with qty_table `mode' must be in the hash table for both
96   registers and must be in the same class.
97
98   The converse is not true.  Since hard registers may be referenced in
99   any mode, two REG expressions might be equivalent in the hash table
100   but not have the same quantity number if the quantity number of one
101   of the registers is not the same mode as those expressions.
102
103Constants and quantity numbers
104
105   When a quantity has a known constant value, that value is stored
106   in the appropriate qty_table `const_rtx'.  This is in addition to
107   putting the constant in the hash table as is usual for non-regs.
108
109   Whether a reg or a constant is preferred is determined by the configuration
110   macro CONST_COSTS and will often depend on the constant value.  In any
111   event, expressions containing constants can be simplified, by fold_rtx.
112
113   When a quantity has a known nearly constant value (such as an address
114   of a stack slot), that value is stored in the appropriate qty_table
115   `const_rtx'.
116
117   Integer constants don't have a machine mode.  However, cse
118   determines the intended machine mode from the destination
119   of the instruction that moves the constant.  The machine mode
120   is recorded in the hash table along with the actual RTL
121   constant expression so that different modes are kept separate.
122
123Other expressions:
124
125   To record known equivalences among expressions in general
126   we use a hash table called `table'.  It has a fixed number of buckets
127   that contain chains of `struct table_elt' elements for expressions.
128   These chains connect the elements whose expressions have the same
129   hash codes.
130
131   Other chains through the same elements connect the elements which
132   currently have equivalent values.
133
134   Register references in an expression are canonicalized before hashing
135   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
136   The hash code of a register reference is computed using the quantity
137   number, not the register number.
138
139   When the value of an expression changes, it is necessary to remove from the
140   hash table not just that expression but all expressions whose values
141   could be different as a result.
142
143     1. If the value changing is in memory, except in special cases
144     ANYTHING referring to memory could be changed.  That is because
145     nobody knows where a pointer does not point.
146     The function `invalidate_memory' removes what is necessary.
147
148     The special cases are when the address is constant or is
149     a constant plus a fixed register such as the frame pointer
150     or a static chain pointer.  When such addresses are stored in,
151     we can tell exactly which other such addresses must be invalidated
152     due to overlap.  `invalidate' does this.
153     All expressions that refer to non-constant
154     memory addresses are also invalidated.  `invalidate_memory' does this.
155
156     2. If the value changing is a register, all expressions
157     containing references to that register, and only those,
158     must be removed.
159
160   Because searching the entire hash table for expressions that contain
161   a register is very slow, we try to figure out when it isn't necessary.
162   Precisely, this is necessary only when expressions have been
163   entered in the hash table using this register, and then the value has
164   changed, and then another expression wants to be added to refer to
165   the register's new value.  This sequence of circumstances is rare
166   within any one basic block.
167
168   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
169   reg_tick[i] is incremented whenever a value is stored in register i.
170   reg_in_table[i] holds -1 if no references to register i have been
171   entered in the table; otherwise, it contains the value reg_tick[i] had
172   when the references were entered.  If we want to enter a reference
173   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
174   Until we want to enter a new entry, the mere fact that the two vectors
175   don't match makes the entries be ignored if anyone tries to match them.
176
177   Registers themselves are entered in the hash table as well as in
178   the equivalent-register chains.  However, the vectors `reg_tick'
179   and `reg_in_table' do not apply to expressions which are simple
180   register references.  These expressions are removed from the table
181   immediately when they become invalid, and this can be done even if
182   we do not immediately search for all the expressions that refer to
183   the register.
184
185   A CLOBBER rtx in an instruction invalidates its operand for further
186   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
187   invalidates everything that resides in memory.
188
189Related expressions:
190
191   Constant expressions that differ only by an additive integer
192   are called related.  When a constant expression is put in
193   the table, the related expression with no constant term
194   is also entered.  These are made to point at each other
195   so that it is possible to find out if there exists any
196   register equivalent to an expression related to a given expression.  */
197
198/* One plus largest register number used in this function.  */
199
200static int max_reg;
201
202/* One plus largest instruction UID used in this function at time of
203   cse_main call.  */
204
205static int max_insn_uid;
206
207/* Length of qty_table vector.  We know in advance we will not need
208   a quantity number this big.  */
209
210static int max_qty;
211
212/* Next quantity number to be allocated.
213   This is 1 + the largest number needed so far.  */
214
215static int next_qty;
216
217/* Per-qty information tracking.
218
219   `first_reg' and `last_reg' track the head and tail of the
220   chain of registers which currently contain this quantity.
221
222   `mode' contains the machine mode of this quantity.
223
224   `const_rtx' holds the rtx of the constant value of this
225   quantity, if known.  A summations of the frame/arg pointer
226   and a constant can also be entered here.  When this holds
227   a known value, `const_insn' is the insn which stored the
228   constant value.
229
230   `comparison_{code,const,qty}' are used to track when a
231   comparison between a quantity and some constant or register has
232   been passed.  In such a case, we know the results of the comparison
233   in case we see it again.  These members record a comparison that
234   is known to be true.  `comparison_code' holds the rtx code of such
235   a comparison, else it is set to UNKNOWN and the other two
236   comparison members are undefined.  `comparison_const' holds
237   the constant being compared against, or zero if the comparison
238   is not against a constant.  `comparison_qty' holds the quantity
239   being compared against when the result is known.  If the comparison
240   is not with a register, `comparison_qty' is -1.  */
241
242struct qty_table_elem
243{
244  rtx const_rtx;
245  rtx const_insn;
246  rtx comparison_const;
247  int comparison_qty;
248  unsigned int first_reg, last_reg;
249  enum machine_mode mode;
250  enum rtx_code comparison_code;
251};
252
253/* The table of all qtys, indexed by qty number.  */
254static struct qty_table_elem *qty_table;
255
256#ifdef HAVE_cc0
257/* For machines that have a CC0, we do not record its value in the hash
258   table since its use is guaranteed to be the insn immediately following
259   its definition and any other insn is presumed to invalidate it.
260
261   Instead, we store below the value last assigned to CC0.  If it should
262   happen to be a constant, it is stored in preference to the actual
263   assigned value.  In case it is a constant, we store the mode in which
264   the constant should be interpreted.  */
265
266static rtx prev_insn_cc0;
267static enum machine_mode prev_insn_cc0_mode;
268#endif
269
270/* Previous actual insn.  0 if at first insn of basic block.  */
271
272static rtx prev_insn;
273
274/* Insn being scanned.  */
275
276static rtx this_insn;
277
278/* Index by register number, gives the number of the next (or
279   previous) register in the chain of registers sharing the same
280   value.
281
282   Or -1 if this register is at the end of the chain.
283
284   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */
285
286/* Per-register equivalence chain.  */
287struct reg_eqv_elem
288{
289  int next, prev;
290};
291
292/* The table of all register equivalence chains.  */
293static struct reg_eqv_elem *reg_eqv_table;
294
295struct cse_reg_info
296{
297  /* Next in hash chain.  */
298  struct cse_reg_info *hash_next;
299
300  /* The next cse_reg_info structure in the free or used list.  */
301  struct cse_reg_info *next;
302
303  /* Search key */
304  unsigned int regno;
305
306  /* The quantity number of the register's current contents.  */
307  int reg_qty;
308
309  /* The number of times the register has been altered in the current
310     basic block.  */
311  int reg_tick;
312
313  /* The REG_TICK value at which rtx's containing this register are
314     valid in the hash table.  If this does not equal the current
315     reg_tick value, such expressions existing in the hash table are
316     invalid.  */
317  int reg_in_table;
318};
319
320/* A free list of cse_reg_info entries.  */
321static struct cse_reg_info *cse_reg_info_free_list;
322
323/* A used list of cse_reg_info entries.  */
324static struct cse_reg_info *cse_reg_info_used_list;
325static struct cse_reg_info *cse_reg_info_used_list_end;
326
327/* A mapping from registers to cse_reg_info data structures.  */
328#define REGHASH_SHIFT	7
329#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
330#define REGHASH_MASK	(REGHASH_SIZE - 1)
331static struct cse_reg_info *reg_hash[REGHASH_SIZE];
332
333#define REGHASH_FN(REGNO)	\
334	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
335
336/* The last lookup we did into the cse_reg_info_tree.  This allows us
337   to cache repeated lookups.  */
338static unsigned int cached_regno;
339static struct cse_reg_info *cached_cse_reg_info;
340
341/* A HARD_REG_SET containing all the hard registers for which there is
342   currently a REG expression in the hash table.  Note the difference
343   from the above variables, which indicate if the REG is mentioned in some
344   expression in the table.  */
345
346static HARD_REG_SET hard_regs_in_table;
347
348/* CUID of insn that starts the basic block currently being cse-processed.  */
349
350static int cse_basic_block_start;
351
352/* CUID of insn that ends the basic block currently being cse-processed.  */
353
354static int cse_basic_block_end;
355
356/* Vector mapping INSN_UIDs to cuids.
357   The cuids are like uids but increase monotonically always.
358   We use them to see whether a reg is used outside a given basic block.  */
359
360static int *uid_cuid;
361
362/* Highest UID in UID_CUID.  */
363static int max_uid;
364
365/* Get the cuid of an insn.  */
366
367#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
368
369/* Nonzero if this pass has made changes, and therefore it's
370   worthwhile to run the garbage collector.  */
371
372static int cse_altered;
373
374/* Nonzero if cse has altered conditional jump insns
375   in such a way that jump optimization should be redone.  */
376
377static int cse_jumps_altered;
378
379/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
380   REG_LABEL, we have to rerun jump after CSE to put in the note.  */
381static int recorded_label_ref;
382
383/* canon_hash stores 1 in do_not_record
384   if it notices a reference to CC0, PC, or some other volatile
385   subexpression.  */
386
387static int do_not_record;
388
389#ifdef LOAD_EXTEND_OP
390
391/* Scratch rtl used when looking for load-extended copy of a MEM.  */
392static rtx memory_extend_rtx;
393#endif
394
395/* canon_hash stores 1 in hash_arg_in_memory
396   if it notices a reference to memory within the expression being hashed.  */
397
398static int hash_arg_in_memory;
399
400/* The hash table contains buckets which are chains of `struct table_elt's,
401   each recording one expression's information.
402   That expression is in the `exp' field.
403
404   The canon_exp field contains a canonical (from the point of view of
405   alias analysis) version of the `exp' field.
406
407   Those elements with the same hash code are chained in both directions
408   through the `next_same_hash' and `prev_same_hash' fields.
409
410   Each set of expressions with equivalent values
411   are on a two-way chain through the `next_same_value'
412   and `prev_same_value' fields, and all point with
413   the `first_same_value' field at the first element in
414   that chain.  The chain is in order of increasing cost.
415   Each element's cost value is in its `cost' field.
416
417   The `in_memory' field is nonzero for elements that
418   involve any reference to memory.  These elements are removed
419   whenever a write is done to an unidentified location in memory.
420   To be safe, we assume that a memory address is unidentified unless
421   the address is either a symbol constant or a constant plus
422   the frame pointer or argument pointer.
423
424   The `related_value' field is used to connect related expressions
425   (that differ by adding an integer).
426   The related expressions are chained in a circular fashion.
427   `related_value' is zero for expressions for which this
428   chain is not useful.
429
430   The `cost' field stores the cost of this element's expression.
431   The `regcost' field stores the value returned by approx_reg_cost for
432   this element's expression.
433
434   The `is_const' flag is set if the element is a constant (including
435   a fixed address).
436
437   The `flag' field is used as a temporary during some search routines.
438
439   The `mode' field is usually the same as GET_MODE (`exp'), but
440   if `exp' is a CONST_INT and has no machine mode then the `mode'
441   field is the mode it was being used as.  Each constant is
442   recorded separately for each mode it is used with.  */
443
444struct table_elt
445{
446  rtx exp;
447  rtx canon_exp;
448  struct table_elt *next_same_hash;
449  struct table_elt *prev_same_hash;
450  struct table_elt *next_same_value;
451  struct table_elt *prev_same_value;
452  struct table_elt *first_same_value;
453  struct table_elt *related_value;
454  int cost;
455  int regcost;
456  enum machine_mode mode;
457  char in_memory;
458  char is_const;
459  char flag;
460};
461
462/* We don't want a lot of buckets, because we rarely have very many
463   things stored in the hash table, and a lot of buckets slows
464   down a lot of loops that happen frequently.  */
465#define HASH_SHIFT	5
466#define HASH_SIZE	(1 << HASH_SHIFT)
467#define HASH_MASK	(HASH_SIZE - 1)
468
469/* Compute hash code of X in mode M.  Special-case case where X is a pseudo
470   register (hard registers may require `do_not_record' to be set).  */
471
472#define HASH(X, M)	\
473 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
474  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
475  : canon_hash (X, M)) & HASH_MASK)
476
477/* Determine whether register number N is considered a fixed register for the
478   purpose of approximating register costs.
479   It is desirable to replace other regs with fixed regs, to reduce need for
480   non-fixed hard regs.
481   A reg wins if it is either the frame pointer or designated as fixed.  */
482#define FIXED_REGNO_P(N)  \
483  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
484   || fixed_regs[N] || global_regs[N])
485
486/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
487   hard registers and pointers into the frame are the cheapest with a cost
488   of 0.  Next come pseudos with a cost of one and other hard registers with
489   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
490
491#define CHEAP_REGNO(N) \
492  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM 	\
493   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM	     	\
494   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) 	\
495   || ((N) < FIRST_PSEUDO_REGISTER					\
496       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
497
498#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
499#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
500
501/* Get the info associated with register N.  */
502
503#define GET_CSE_REG_INFO(N) 			\
504  (((N) == cached_regno && cached_cse_reg_info)	\
505   ? cached_cse_reg_info : get_cse_reg_info ((N)))
506
507/* Get the number of times this register has been updated in this
508   basic block.  */
509
510#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
511
512/* Get the point at which REG was recorded in the table.  */
513
514#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
515
516/* Get the quantity number for REG.  */
517
518#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
519
520/* Determine if the quantity number for register X represents a valid index
521   into the qty_table.  */
522
523#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
524
525static struct table_elt *table[HASH_SIZE];
526
527/* Chain of `struct table_elt's made so far for this function
528   but currently removed from the table.  */
529
530static struct table_elt *free_element_chain;
531
532/* Number of `struct table_elt' structures made so far for this function.  */
533
534static int n_elements_made;
535
536/* Maximum value `n_elements_made' has had so far in this compilation
537   for functions previously processed.  */
538
539static int max_elements_made;
540
541/* Surviving equivalence class when two equivalence classes are merged
542   by recording the effects of a jump in the last insn.  Zero if the
543   last insn was not a conditional jump.  */
544
545static struct table_elt *last_jump_equiv_class;
546
547/* Set to the cost of a constant pool reference if one was found for a
548   symbolic constant.  If this was found, it means we should try to
549   convert constants into constant pool entries if they don't fit in
550   the insn.  */
551
552static int constant_pool_entries_cost;
553
554/* Define maximum length of a branch path.  */
555
556#define PATHLENGTH	10
557
558/* This data describes a block that will be processed by cse_basic_block.  */
559
560struct cse_basic_block_data
561{
562  /* Lowest CUID value of insns in block.  */
563  int low_cuid;
564  /* Highest CUID value of insns in block.  */
565  int high_cuid;
566  /* Total number of SETs in block.  */
567  int nsets;
568  /* Last insn in the block.  */
569  rtx last;
570  /* Size of current branch path, if any.  */
571  int path_size;
572  /* Current branch path, indicating which branches will be taken.  */
573  struct branch_path
574    {
575      /* The branch insn.  */
576      rtx branch;
577      /* Whether it should be taken or not.  AROUND is the same as taken
578	 except that it is used when the destination label is not preceded
579       by a BARRIER.  */
580      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
581    } path[PATHLENGTH];
582};
583
584/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
585   virtual regs here because the simplify_*_operation routines are called
586   by integrate.c, which is called before virtual register instantiation.
587
588   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
589   a header file so that their definitions can be shared with the
590   simplification routines in simplify-rtx.c.  Until then, do not
591   change these macros without also changing the copy in simplify-rtx.c.  */
592
593#define FIXED_BASE_PLUS_P(X)					\
594  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
595   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
596   || (X) == virtual_stack_vars_rtx				\
597   || (X) == virtual_incoming_args_rtx				\
598   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
599       && (XEXP (X, 0) == frame_pointer_rtx			\
600	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
601	   || ((X) == arg_pointer_rtx				\
602	       && fixed_regs[ARG_POINTER_REGNUM])		\
603	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
604	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
605   || GET_CODE (X) == ADDRESSOF)
606
607/* Similar, but also allows reference to the stack pointer.
608
609   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
610   arg_pointer_rtx by itself is nonzero, because on at least one machine,
611   the i960, the arg pointer is zero when it is unused.  */
612
613#define NONZERO_BASE_PLUS_P(X)					\
614  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
615   || (X) == virtual_stack_vars_rtx				\
616   || (X) == virtual_incoming_args_rtx				\
617   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
618       && (XEXP (X, 0) == frame_pointer_rtx			\
619	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
620	   || ((X) == arg_pointer_rtx				\
621	       && fixed_regs[ARG_POINTER_REGNUM])		\
622	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
623	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
624   || (X) == stack_pointer_rtx					\
625   || (X) == virtual_stack_dynamic_rtx				\
626   || (X) == virtual_outgoing_args_rtx				\
627   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
628       && (XEXP (X, 0) == stack_pointer_rtx			\
629	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
630	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
631   || GET_CODE (X) == ADDRESSOF)
632
633static int notreg_cost		PARAMS ((rtx, enum rtx_code));
634static int approx_reg_cost_1	PARAMS ((rtx *, void *));
635static int approx_reg_cost	PARAMS ((rtx));
636static int preferrable		PARAMS ((int, int, int, int));
637static void new_basic_block	PARAMS ((void));
638static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
639static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
640static void delete_reg_equiv	PARAMS ((unsigned int));
641static int mention_regs		PARAMS ((rtx));
642static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
643static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
644static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
645       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
646static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
647static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
648					 enum machine_mode));
649static void merge_equiv_classes PARAMS ((struct table_elt *,
650					 struct table_elt *));
651static void invalidate		PARAMS ((rtx, enum machine_mode));
652static int cse_rtx_varies_p	PARAMS ((rtx, int));
653static void remove_invalid_refs	PARAMS ((unsigned int));
654static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
655						 enum machine_mode));
656static void rehash_using_reg	PARAMS ((rtx));
657static void invalidate_memory	PARAMS ((void));
658static void invalidate_for_call	PARAMS ((void));
659static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
660static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
661static unsigned canon_hash_string PARAMS ((const char *));
662static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
663static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
664static rtx canon_reg		PARAMS ((rtx, rtx));
665static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
666static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
667						   enum machine_mode *,
668						   enum machine_mode *));
669static rtx fold_rtx		PARAMS ((rtx, rtx));
670static rtx equiv_constant	PARAMS ((rtx));
671static void record_jump_equiv	PARAMS ((rtx, int));
672static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
673					 rtx, rtx, int));
674static void cse_insn		PARAMS ((rtx, rtx));
675static int addr_affects_sp_p	PARAMS ((rtx));
676static void invalidate_from_clobbers PARAMS ((rtx));
677static rtx cse_process_notes	PARAMS ((rtx, rtx));
678static void cse_around_loop	PARAMS ((rtx));
679static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
680static void invalidate_skipped_block PARAMS ((rtx));
681static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
682static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
683static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
684static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
685static int check_for_label_ref	PARAMS ((rtx *, void *));
686extern void dump_class          PARAMS ((struct table_elt*));
687static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
688static int check_dependence	PARAMS ((rtx *, void *));
689
690static void flush_hash_table	PARAMS ((void));
691static bool insn_live_p		PARAMS ((rtx, int *));
692static bool set_live_p		PARAMS ((rtx, rtx, int *));
693static bool dead_libcall_p	PARAMS ((rtx));
694
695/* Dump the expressions in the equivalence class indicated by CLASSP.
696   This function is used only for debugging.  */
697void
698dump_class (classp)
699     struct table_elt *classp;
700{
701  struct table_elt *elt;
702
703  fprintf (stderr, "Equivalence chain for ");
704  print_rtl (stderr, classp->exp);
705  fprintf (stderr, ": \n");
706
707  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
708    {
709      print_rtl (stderr, elt->exp);
710      fprintf (stderr, "\n");
711    }
712}
713
714/* Subroutine of approx_reg_cost; called through for_each_rtx.  */
715
716static int
717approx_reg_cost_1 (xp, data)
718     rtx *xp;
719     void *data;
720{
721  rtx x = *xp;
722  regset set = (regset) data;
723
724  if (x && GET_CODE (x) == REG)
725    SET_REGNO_REG_SET (set, REGNO (x));
726  return 0;
727}
728
729/* Return an estimate of the cost of the registers used in an rtx.
730   This is mostly the number of different REG expressions in the rtx;
731   however for some exceptions like fixed registers we use a cost of
732   0.  If any other hard register reference occurs, return MAX_COST.  */
733
734static int
735approx_reg_cost (x)
736     rtx x;
737{
738  regset_head set;
739  int i;
740  int cost = 0;
741  int hardregs = 0;
742
743  INIT_REG_SET (&set);
744  for_each_rtx (&x, approx_reg_cost_1, (void *)&set);
745
746  EXECUTE_IF_SET_IN_REG_SET
747    (&set, 0, i,
748     {
749       if (! CHEAP_REGNO (i))
750	 {
751	   if (i < FIRST_PSEUDO_REGISTER)
752	     hardregs++;
753
754	   cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
755	 }
756     });
757
758  CLEAR_REG_SET (&set);
759  return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
760}
761
762/* Return a negative value if an rtx A, whose costs are given by COST_A
763   and REGCOST_A, is more desirable than an rtx B.
764   Return a positive value if A is less desirable, or 0 if the two are
765   equally good.  */
766static int
767preferrable (cost_a, regcost_a, cost_b, regcost_b)
768     int cost_a, regcost_a, cost_b, regcost_b;
769{
770  /* First, get rid of a cases involving expressions that are entirely
771     unwanted.  */
772  if (cost_a != cost_b)
773    {
774      if (cost_a == MAX_COST)
775	return 1;
776      if (cost_b == MAX_COST)
777	return -1;
778    }
779
780  /* Avoid extending lifetimes of hardregs.  */
781  if (regcost_a != regcost_b)
782    {
783      if (regcost_a == MAX_COST)
784	return 1;
785      if (regcost_b == MAX_COST)
786	return -1;
787    }
788
789  /* Normal operation costs take precedence.  */
790  if (cost_a != cost_b)
791    return cost_a - cost_b;
792  /* Only if these are identical consider effects on register pressure.  */
793  if (regcost_a != regcost_b)
794    return regcost_a - regcost_b;
795  return 0;
796}
797
798/* Internal function, to compute cost when X is not a register; called
799   from COST macro to keep it simple.  */
800
801static int
802notreg_cost (x, outer)
803     rtx x;
804     enum rtx_code outer;
805{
806  return ((GET_CODE (x) == SUBREG
807	   && GET_CODE (SUBREG_REG (x)) == REG
808	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
809	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
810	   && (GET_MODE_SIZE (GET_MODE (x))
811	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
812	   && subreg_lowpart_p (x)
813	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
814				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
815	  ? 0
816	  : rtx_cost (x, outer) * 2);
817}
818
819/* Return an estimate of the cost of computing rtx X.
820   One use is in cse, to decide which expression to keep in the hash table.
821   Another is in rtl generation, to pick the cheapest way to multiply.
822   Other uses like the latter are expected in the future.  */
823
824int
825rtx_cost (x, outer_code)
826     rtx x;
827     enum rtx_code outer_code ATTRIBUTE_UNUSED;
828{
829  int i, j;
830  enum rtx_code code;
831  const char *fmt;
832  int total;
833
834  if (x == 0)
835    return 0;
836
837  /* Compute the default costs of certain things.
838     Note that RTX_COSTS can override the defaults.  */
839
840  code = GET_CODE (x);
841  switch (code)
842    {
843    case MULT:
844      /* Count multiplication by 2**n as a shift,
845	 because if we are considering it, we would output it as a shift.  */
846      if (GET_CODE (XEXP (x, 1)) == CONST_INT
847	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
848	total = 2;
849      else
850	total = COSTS_N_INSNS (5);
851      break;
852    case DIV:
853    case UDIV:
854    case MOD:
855    case UMOD:
856      total = COSTS_N_INSNS (7);
857      break;
858    case USE:
859      /* Used in loop.c and combine.c as a marker.  */
860      total = 0;
861      break;
862    default:
863      total = COSTS_N_INSNS (1);
864    }
865
866  switch (code)
867    {
868    case REG:
869      return 0;
870
871    case SUBREG:
872      /* If we can't tie these modes, make this expensive.  The larger
873	 the mode, the more expensive it is.  */
874      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
875	return COSTS_N_INSNS (2
876			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
877      break;
878
879#ifdef RTX_COSTS
880      RTX_COSTS (x, code, outer_code);
881#endif
882#ifdef CONST_COSTS
883      CONST_COSTS (x, code, outer_code);
884#endif
885
886    default:
887#ifdef DEFAULT_RTX_COSTS
888      DEFAULT_RTX_COSTS (x, code, outer_code);
889#endif
890      break;
891    }
892
893  /* Sum the costs of the sub-rtx's, plus cost of this operation,
894     which is already in total.  */
895
896  fmt = GET_RTX_FORMAT (code);
897  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
898    if (fmt[i] == 'e')
899      total += rtx_cost (XEXP (x, i), code);
900    else if (fmt[i] == 'E')
901      for (j = 0; j < XVECLEN (x, i); j++)
902	total += rtx_cost (XVECEXP (x, i, j), code);
903
904  return total;
905}
906
907/* Return cost of address expression X.
908   Expect that X is properly formed address reference.  */
909
910int
911address_cost (x, mode)
912     rtx x;
913     enum machine_mode mode;
914{
915  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
916     during CSE, such nodes are present.  Using an ADDRESSOF node which
917     refers to the address of a REG is a good thing because we can then
918     turn (MEM (ADDRESSSOF (REG))) into just plain REG.  */
919
920  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
921    return -1;
922
923  /* We may be asked for cost of various unusual addresses, such as operands
924     of push instruction.  It is not worthwhile to complicate writing
925     of ADDRESS_COST macro by such cases.  */
926
927  if (!memory_address_p (mode, x))
928    return 1000;
929#ifdef ADDRESS_COST
930  return ADDRESS_COST (x);
931#else
932  return rtx_cost (x, MEM);
933#endif
934}
935
936
937static struct cse_reg_info *
938get_cse_reg_info (regno)
939     unsigned int regno;
940{
941  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
942  struct cse_reg_info *p;
943
944  for (p = *hash_head; p != NULL; p = p->hash_next)
945    if (p->regno == regno)
946      break;
947
948  if (p == NULL)
949    {
950      /* Get a new cse_reg_info structure.  */
951      if (cse_reg_info_free_list)
952	{
953	  p = cse_reg_info_free_list;
954	  cse_reg_info_free_list = p->next;
955	}
956      else
957	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
958
959      /* Insert into hash table.  */
960      p->hash_next = *hash_head;
961      *hash_head = p;
962
963      /* Initialize it.  */
964      p->reg_tick = 1;
965      p->reg_in_table = -1;
966      p->reg_qty = regno;
967      p->regno = regno;
968      p->next = cse_reg_info_used_list;
969      cse_reg_info_used_list = p;
970      if (!cse_reg_info_used_list_end)
971	cse_reg_info_used_list_end = p;
972    }
973
974  /* Cache this lookup; we tend to be looking up information about the
975     same register several times in a row.  */
976  cached_regno = regno;
977  cached_cse_reg_info = p;
978
979  return p;
980}
981
982/* Clear the hash table and initialize each register with its own quantity,
983   for a new basic block.  */
984
985static void
986new_basic_block ()
987{
988  int i;
989
990  next_qty = max_reg;
991
992  /* Clear out hash table state for this pass.  */
993
994  memset ((char *) reg_hash, 0, sizeof reg_hash);
995
996  if (cse_reg_info_used_list)
997    {
998      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
999      cse_reg_info_free_list = cse_reg_info_used_list;
1000      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1001    }
1002  cached_cse_reg_info = 0;
1003
1004  CLEAR_HARD_REG_SET (hard_regs_in_table);
1005
1006  /* The per-quantity values used to be initialized here, but it is
1007     much faster to initialize each as it is made in `make_new_qty'.  */
1008
1009  for (i = 0; i < HASH_SIZE; i++)
1010    {
1011      struct table_elt *first;
1012
1013      first = table[i];
1014      if (first != NULL)
1015	{
1016	  struct table_elt *last = first;
1017
1018	  table[i] = NULL;
1019
1020	  while (last->next_same_hash != NULL)
1021	    last = last->next_same_hash;
1022
1023	  /* Now relink this hash entire chain into
1024	     the free element list.  */
1025
1026	  last->next_same_hash = free_element_chain;
1027	  free_element_chain = first;
1028	}
1029    }
1030
1031  prev_insn = 0;
1032
1033#ifdef HAVE_cc0
1034  prev_insn_cc0 = 0;
1035#endif
1036}
1037
1038/* Say that register REG contains a quantity in mode MODE not in any
1039   register before and initialize that quantity.  */
1040
1041static void
1042make_new_qty (reg, mode)
1043     unsigned int reg;
1044     enum machine_mode mode;
1045{
1046  int q;
1047  struct qty_table_elem *ent;
1048  struct reg_eqv_elem *eqv;
1049
1050  if (next_qty >= max_qty)
1051    abort ();
1052
1053  q = REG_QTY (reg) = next_qty++;
1054  ent = &qty_table[q];
1055  ent->first_reg = reg;
1056  ent->last_reg = reg;
1057  ent->mode = mode;
1058  ent->const_rtx = ent->const_insn = NULL_RTX;
1059  ent->comparison_code = UNKNOWN;
1060
1061  eqv = &reg_eqv_table[reg];
1062  eqv->next = eqv->prev = -1;
1063}
1064
1065/* Make reg NEW equivalent to reg OLD.
1066   OLD is not changing; NEW is.  */
1067
1068static void
1069make_regs_eqv (new, old)
1070     unsigned int new, old;
1071{
1072  unsigned int lastr, firstr;
1073  int q = REG_QTY (old);
1074  struct qty_table_elem *ent;
1075
1076  ent = &qty_table[q];
1077
1078  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
1079  if (! REGNO_QTY_VALID_P (old))
1080    abort ();
1081
1082  REG_QTY (new) = q;
1083  firstr = ent->first_reg;
1084  lastr = ent->last_reg;
1085
1086  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
1087     hard regs.  Among pseudos, if NEW will live longer than any other reg
1088     of the same qty, and that is beyond the current basic block,
1089     make it the new canonical replacement for this qty.  */
1090  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1091      /* Certain fixed registers might be of the class NO_REGS.  This means
1092	 that not only can they not be allocated by the compiler, but
1093	 they cannot be used in substitutions or canonicalizations
1094	 either.  */
1095      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1096      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1097	  || (new >= FIRST_PSEUDO_REGISTER
1098	      && (firstr < FIRST_PSEUDO_REGISTER
1099		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1100		       || (uid_cuid[REGNO_FIRST_UID (new)]
1101			   < cse_basic_block_start))
1102		      && (uid_cuid[REGNO_LAST_UID (new)]
1103			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1104    {
1105      reg_eqv_table[firstr].prev = new;
1106      reg_eqv_table[new].next = firstr;
1107      reg_eqv_table[new].prev = -1;
1108      ent->first_reg = new;
1109    }
1110  else
1111    {
1112      /* If NEW is a hard reg (known to be non-fixed), insert at end.
1113	 Otherwise, insert before any non-fixed hard regs that are at the
1114	 end.  Registers of class NO_REGS cannot be used as an
1115	 equivalent for anything.  */
1116      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1117	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1118	     && new >= FIRST_PSEUDO_REGISTER)
1119	lastr = reg_eqv_table[lastr].prev;
1120      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1121      if (reg_eqv_table[lastr].next >= 0)
1122	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1123      else
1124	qty_table[q].last_reg = new;
1125      reg_eqv_table[lastr].next = new;
1126      reg_eqv_table[new].prev = lastr;
1127    }
1128}
1129
1130/* Remove REG from its equivalence class.  */
1131
1132static void
1133delete_reg_equiv (reg)
1134     unsigned int reg;
1135{
1136  struct qty_table_elem *ent;
1137  int q = REG_QTY (reg);
1138  int p, n;
1139
1140  /* If invalid, do nothing.  */
1141  if (q == (int) reg)
1142    return;
1143
1144  ent = &qty_table[q];
1145
1146  p = reg_eqv_table[reg].prev;
1147  n = reg_eqv_table[reg].next;
1148
1149  if (n != -1)
1150    reg_eqv_table[n].prev = p;
1151  else
1152    ent->last_reg = p;
1153  if (p != -1)
1154    reg_eqv_table[p].next = n;
1155  else
1156    ent->first_reg = n;
1157
1158  REG_QTY (reg) = reg;
1159}
1160
1161/* Remove any invalid expressions from the hash table
1162   that refer to any of the registers contained in expression X.
1163
1164   Make sure that newly inserted references to those registers
1165   as subexpressions will be considered valid.
1166
1167   mention_regs is not called when a register itself
1168   is being stored in the table.
1169
1170   Return 1 if we have done something that may have changed the hash code
1171   of X.  */
1172
1173static int
1174mention_regs (x)
1175     rtx x;
1176{
1177  enum rtx_code code;
1178  int i, j;
1179  const char *fmt;
1180  int changed = 0;
1181
1182  if (x == 0)
1183    return 0;
1184
1185  code = GET_CODE (x);
1186  if (code == REG)
1187    {
1188      unsigned int regno = REGNO (x);
1189      unsigned int endregno
1190	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1191		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1192      unsigned int i;
1193
1194      for (i = regno; i < endregno; i++)
1195	{
1196	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1197	    remove_invalid_refs (i);
1198
1199	  REG_IN_TABLE (i) = REG_TICK (i);
1200	}
1201
1202      return 0;
1203    }
1204
1205  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1206     pseudo if they don't use overlapping words.  We handle only pseudos
1207     here for simplicity.  */
1208  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1209      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1210    {
1211      unsigned int i = REGNO (SUBREG_REG (x));
1212
1213      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1214	{
1215	  /* If reg_tick has been incremented more than once since
1216	     reg_in_table was last set, that means that the entire
1217	     register has been set before, so discard anything memorized
1218	     for the entire register, including all SUBREG expressions.  */
1219	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1220	    remove_invalid_refs (i);
1221	  else
1222	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1223	}
1224
1225      REG_IN_TABLE (i) = REG_TICK (i);
1226      return 0;
1227    }
1228
1229  /* If X is a comparison or a COMPARE and either operand is a register
1230     that does not have a quantity, give it one.  This is so that a later
1231     call to record_jump_equiv won't cause X to be assigned a different
1232     hash code and not found in the table after that call.
1233
1234     It is not necessary to do this here, since rehash_using_reg can
1235     fix up the table later, but doing this here eliminates the need to
1236     call that expensive function in the most common case where the only
1237     use of the register is in the comparison.  */
1238
1239  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1240    {
1241      if (GET_CODE (XEXP (x, 0)) == REG
1242	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1243	if (insert_regs (XEXP (x, 0), NULL, 0))
1244	  {
1245	    rehash_using_reg (XEXP (x, 0));
1246	    changed = 1;
1247	  }
1248
1249      if (GET_CODE (XEXP (x, 1)) == REG
1250	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1251	if (insert_regs (XEXP (x, 1), NULL, 0))
1252	  {
1253	    rehash_using_reg (XEXP (x, 1));
1254	    changed = 1;
1255	  }
1256    }
1257
1258  fmt = GET_RTX_FORMAT (code);
1259  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1260    if (fmt[i] == 'e')
1261      changed |= mention_regs (XEXP (x, i));
1262    else if (fmt[i] == 'E')
1263      for (j = 0; j < XVECLEN (x, i); j++)
1264	changed |= mention_regs (XVECEXP (x, i, j));
1265
1266  return changed;
1267}
1268
1269/* Update the register quantities for inserting X into the hash table
1270   with a value equivalent to CLASSP.
1271   (If the class does not contain a REG, it is irrelevant.)
1272   If MODIFIED is nonzero, X is a destination; it is being modified.
1273   Note that delete_reg_equiv should be called on a register
1274   before insert_regs is done on that register with MODIFIED != 0.
1275
1276   Nonzero value means that elements of reg_qty have changed
1277   so X's hash code may be different.  */
1278
1279static int
1280insert_regs (x, classp, modified)
1281     rtx x;
1282     struct table_elt *classp;
1283     int modified;
1284{
1285  if (GET_CODE (x) == REG)
1286    {
1287      unsigned int regno = REGNO (x);
1288      int qty_valid;
1289
1290      /* If REGNO is in the equivalence table already but is of the
1291	 wrong mode for that equivalence, don't do anything here.  */
1292
1293      qty_valid = REGNO_QTY_VALID_P (regno);
1294      if (qty_valid)
1295	{
1296	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1297
1298	  if (ent->mode != GET_MODE (x))
1299	    return 0;
1300	}
1301
1302      if (modified || ! qty_valid)
1303	{
1304	  if (classp)
1305	    for (classp = classp->first_same_value;
1306		 classp != 0;
1307		 classp = classp->next_same_value)
1308	      if (GET_CODE (classp->exp) == REG
1309		  && GET_MODE (classp->exp) == GET_MODE (x))
1310		{
1311		  make_regs_eqv (regno, REGNO (classp->exp));
1312		  return 1;
1313		}
1314
1315	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1316	     than REG_IN_TABLE to find out if there was only a single preceding
1317	     invalidation - for the SUBREG - or another one, which would be
1318	     for the full register.  However, if we find here that REG_TICK
1319	     indicates that the register is invalid, it means that it has
1320	     been invalidated in a separate operation.  The SUBREG might be used
1321	     now (then this is a recursive call), or we might use the full REG
1322	     now and a SUBREG of it later.  So bump up REG_TICK so that
1323	     mention_regs will do the right thing.  */
1324	  if (! modified
1325	      && REG_IN_TABLE (regno) >= 0
1326	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1327	    REG_TICK (regno)++;
1328	  make_new_qty (regno, GET_MODE (x));
1329	  return 1;
1330	}
1331
1332      return 0;
1333    }
1334
1335  /* If X is a SUBREG, we will likely be inserting the inner register in the
1336     table.  If that register doesn't have an assigned quantity number at
1337     this point but does later, the insertion that we will be doing now will
1338     not be accessible because its hash code will have changed.  So assign
1339     a quantity number now.  */
1340
1341  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1342	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1343    {
1344      insert_regs (SUBREG_REG (x), NULL, 0);
1345      mention_regs (x);
1346      return 1;
1347    }
1348  else
1349    return mention_regs (x);
1350}
1351
1352/* Look in or update the hash table.  */
1353
1354/* Remove table element ELT from use in the table.
1355   HASH is its hash code, made using the HASH macro.
1356   It's an argument because often that is known in advance
1357   and we save much time not recomputing it.  */
1358
1359static void
1360remove_from_table (elt, hash)
1361     struct table_elt *elt;
1362     unsigned hash;
1363{
1364  if (elt == 0)
1365    return;
1366
1367  /* Mark this element as removed.  See cse_insn.  */
1368  elt->first_same_value = 0;
1369
1370  /* Remove the table element from its equivalence class.  */
1371
1372  {
1373    struct table_elt *prev = elt->prev_same_value;
1374    struct table_elt *next = elt->next_same_value;
1375
1376    if (next)
1377      next->prev_same_value = prev;
1378
1379    if (prev)
1380      prev->next_same_value = next;
1381    else
1382      {
1383	struct table_elt *newfirst = next;
1384	while (next)
1385	  {
1386	    next->first_same_value = newfirst;
1387	    next = next->next_same_value;
1388	  }
1389      }
1390  }
1391
1392  /* Remove the table element from its hash bucket.  */
1393
1394  {
1395    struct table_elt *prev = elt->prev_same_hash;
1396    struct table_elt *next = elt->next_same_hash;
1397
1398    if (next)
1399      next->prev_same_hash = prev;
1400
1401    if (prev)
1402      prev->next_same_hash = next;
1403    else if (table[hash] == elt)
1404      table[hash] = next;
1405    else
1406      {
1407	/* This entry is not in the proper hash bucket.  This can happen
1408	   when two classes were merged by `merge_equiv_classes'.  Search
1409	   for the hash bucket that it heads.  This happens only very
1410	   rarely, so the cost is acceptable.  */
1411	for (hash = 0; hash < HASH_SIZE; hash++)
1412	  if (table[hash] == elt)
1413	    table[hash] = next;
1414      }
1415  }
1416
1417  /* Remove the table element from its related-value circular chain.  */
1418
1419  if (elt->related_value != 0 && elt->related_value != elt)
1420    {
1421      struct table_elt *p = elt->related_value;
1422
1423      while (p->related_value != elt)
1424	p = p->related_value;
1425      p->related_value = elt->related_value;
1426      if (p->related_value == p)
1427	p->related_value = 0;
1428    }
1429
1430  /* Now add it to the free element chain.  */
1431  elt->next_same_hash = free_element_chain;
1432  free_element_chain = elt;
1433}
1434
1435/* Look up X in the hash table and return its table element,
1436   or 0 if X is not in the table.
1437
1438   MODE is the machine-mode of X, or if X is an integer constant
1439   with VOIDmode then MODE is the mode with which X will be used.
1440
1441   Here we are satisfied to find an expression whose tree structure
1442   looks like X.  */
1443
1444static struct table_elt *
1445lookup (x, hash, mode)
1446     rtx x;
1447     unsigned hash;
1448     enum machine_mode mode;
1449{
1450  struct table_elt *p;
1451
1452  for (p = table[hash]; p; p = p->next_same_hash)
1453    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1454			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1455      return p;
1456
1457  return 0;
1458}
1459
1460/* Like `lookup' but don't care whether the table element uses invalid regs.
1461   Also ignore discrepancies in the machine mode of a register.  */
1462
1463static struct table_elt *
1464lookup_for_remove (x, hash, mode)
1465     rtx x;
1466     unsigned hash;
1467     enum machine_mode mode;
1468{
1469  struct table_elt *p;
1470
1471  if (GET_CODE (x) == REG)
1472    {
1473      unsigned int regno = REGNO (x);
1474
1475      /* Don't check the machine mode when comparing registers;
1476	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1477      for (p = table[hash]; p; p = p->next_same_hash)
1478	if (GET_CODE (p->exp) == REG
1479	    && REGNO (p->exp) == regno)
1480	  return p;
1481    }
1482  else
1483    {
1484      for (p = table[hash]; p; p = p->next_same_hash)
1485	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1486	  return p;
1487    }
1488
1489  return 0;
1490}
1491
1492/* Look for an expression equivalent to X and with code CODE.
1493   If one is found, return that expression.  */
1494
1495static rtx
1496lookup_as_function (x, code)
1497     rtx x;
1498     enum rtx_code code;
1499{
1500  struct table_elt *p
1501    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1502
1503  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1504     long as we are narrowing.  So if we looked in vain for a mode narrower
1505     than word_mode before, look for word_mode now.  */
1506  if (p == 0 && code == CONST_INT
1507      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1508    {
1509      x = copy_rtx (x);
1510      PUT_MODE (x, word_mode);
1511      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1512    }
1513
1514  if (p == 0)
1515    return 0;
1516
1517  for (p = p->first_same_value; p; p = p->next_same_value)
1518    if (GET_CODE (p->exp) == code
1519	/* Make sure this is a valid entry in the table.  */
1520	&& exp_equiv_p (p->exp, p->exp, 1, 0))
1521      return p->exp;
1522
1523  return 0;
1524}
1525
1526/* Insert X in the hash table, assuming HASH is its hash code
1527   and CLASSP is an element of the class it should go in
1528   (or 0 if a new class should be made).
1529   It is inserted at the proper position to keep the class in
1530   the order cheapest first.
1531
1532   MODE is the machine-mode of X, or if X is an integer constant
1533   with VOIDmode then MODE is the mode with which X will be used.
1534
1535   For elements of equal cheapness, the most recent one
1536   goes in front, except that the first element in the list
1537   remains first unless a cheaper element is added.  The order of
1538   pseudo-registers does not matter, as canon_reg will be called to
1539   find the cheapest when a register is retrieved from the table.
1540
1541   The in_memory field in the hash table element is set to 0.
1542   The caller must set it nonzero if appropriate.
1543
1544   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1545   and if insert_regs returns a nonzero value
1546   you must then recompute its hash code before calling here.
1547
1548   If necessary, update table showing constant values of quantities.  */
1549
1550#define CHEAPER(X, Y) \
1551 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1552
1553static struct table_elt *
1554insert (x, classp, hash, mode)
1555     rtx x;
1556     struct table_elt *classp;
1557     unsigned hash;
1558     enum machine_mode mode;
1559{
1560  struct table_elt *elt;
1561
1562  /* If X is a register and we haven't made a quantity for it,
1563     something is wrong.  */
1564  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1565    abort ();
1566
1567  /* If X is a hard register, show it is being put in the table.  */
1568  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1569    {
1570      unsigned int regno = REGNO (x);
1571      unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1572      unsigned int i;
1573
1574      for (i = regno; i < endregno; i++)
1575	SET_HARD_REG_BIT (hard_regs_in_table, i);
1576    }
1577
1578  /* Put an element for X into the right hash bucket.  */
1579
1580  elt = free_element_chain;
1581  if (elt)
1582    free_element_chain = elt->next_same_hash;
1583  else
1584    {
1585      n_elements_made++;
1586      elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1587    }
1588
1589  elt->exp = x;
1590  elt->canon_exp = NULL_RTX;
1591  elt->cost = COST (x);
1592  elt->regcost = approx_reg_cost (x);
1593  elt->next_same_value = 0;
1594  elt->prev_same_value = 0;
1595  elt->next_same_hash = table[hash];
1596  elt->prev_same_hash = 0;
1597  elt->related_value = 0;
1598  elt->in_memory = 0;
1599  elt->mode = mode;
1600  elt->is_const = (CONSTANT_P (x)
1601		   /* GNU C++ takes advantage of this for `this'
1602		      (and other const values).  */
1603		   || (RTX_UNCHANGING_P (x)
1604		       && GET_CODE (x) == REG
1605		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1606		   || FIXED_BASE_PLUS_P (x));
1607
1608  if (table[hash])
1609    table[hash]->prev_same_hash = elt;
1610  table[hash] = elt;
1611
1612  /* Put it into the proper value-class.  */
1613  if (classp)
1614    {
1615      classp = classp->first_same_value;
1616      if (CHEAPER (elt, classp))
1617	/* Insert at the head of the class */
1618	{
1619	  struct table_elt *p;
1620	  elt->next_same_value = classp;
1621	  classp->prev_same_value = elt;
1622	  elt->first_same_value = elt;
1623
1624	  for (p = classp; p; p = p->next_same_value)
1625	    p->first_same_value = elt;
1626	}
1627      else
1628	{
1629	  /* Insert not at head of the class.  */
1630	  /* Put it after the last element cheaper than X.  */
1631	  struct table_elt *p, *next;
1632
1633	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1634	       p = next);
1635
1636	  /* Put it after P and before NEXT.  */
1637	  elt->next_same_value = next;
1638	  if (next)
1639	    next->prev_same_value = elt;
1640
1641	  elt->prev_same_value = p;
1642	  p->next_same_value = elt;
1643	  elt->first_same_value = classp;
1644	}
1645    }
1646  else
1647    elt->first_same_value = elt;
1648
1649  /* If this is a constant being set equivalent to a register or a register
1650     being set equivalent to a constant, note the constant equivalence.
1651
1652     If this is a constant, it cannot be equivalent to a different constant,
1653     and a constant is the only thing that can be cheaper than a register.  So
1654     we know the register is the head of the class (before the constant was
1655     inserted).
1656
1657     If this is a register that is not already known equivalent to a
1658     constant, we must check the entire class.
1659
1660     If this is a register that is already known equivalent to an insn,
1661     update the qtys `const_insn' to show that `this_insn' is the latest
1662     insn making that quantity equivalent to the constant.  */
1663
1664  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1665      && GET_CODE (x) != REG)
1666    {
1667      int exp_q = REG_QTY (REGNO (classp->exp));
1668      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1669
1670      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1671      exp_ent->const_insn = this_insn;
1672    }
1673
1674  else if (GET_CODE (x) == REG
1675	   && classp
1676	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677	   && ! elt->is_const)
1678    {
1679      struct table_elt *p;
1680
1681      for (p = classp; p != 0; p = p->next_same_value)
1682	{
1683	  if (p->is_const && GET_CODE (p->exp) != REG)
1684	    {
1685	      int x_q = REG_QTY (REGNO (x));
1686	      struct qty_table_elem *x_ent = &qty_table[x_q];
1687
1688	      x_ent->const_rtx
1689		= gen_lowpart_if_possible (GET_MODE (x), p->exp);
1690	      x_ent->const_insn = this_insn;
1691	      break;
1692	    }
1693	}
1694    }
1695
1696  else if (GET_CODE (x) == REG
1697	   && qty_table[REG_QTY (REGNO (x))].const_rtx
1698	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1700
1701  /* If this is a constant with symbolic value,
1702     and it has a term with an explicit integer value,
1703     link it up with related expressions.  */
1704  if (GET_CODE (x) == CONST)
1705    {
1706      rtx subexp = get_related_value (x);
1707      unsigned subhash;
1708      struct table_elt *subelt, *subelt_prev;
1709
1710      if (subexp != 0)
1711	{
1712	  /* Get the integer-free subexpression in the hash table.  */
1713	  subhash = safe_hash (subexp, mode) & HASH_MASK;
1714	  subelt = lookup (subexp, subhash, mode);
1715	  if (subelt == 0)
1716	    subelt = insert (subexp, NULL, subhash, mode);
1717	  /* Initialize SUBELT's circular chain if it has none.  */
1718	  if (subelt->related_value == 0)
1719	    subelt->related_value = subelt;
1720	  /* Find the element in the circular chain that precedes SUBELT.  */
1721	  subelt_prev = subelt;
1722	  while (subelt_prev->related_value != subelt)
1723	    subelt_prev = subelt_prev->related_value;
1724	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725	     This way the element that follows SUBELT is the oldest one.  */
1726	  elt->related_value = subelt_prev->related_value;
1727	  subelt_prev->related_value = elt;
1728	}
1729    }
1730
1731  return elt;
1732}
1733
1734/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1735   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1736   the two classes equivalent.
1737
1738   CLASS1 will be the surviving class; CLASS2 should not be used after this
1739   call.
1740
1741   Any invalid entries in CLASS2 will not be copied.  */
1742
1743static void
1744merge_equiv_classes (class1, class2)
1745     struct table_elt *class1, *class2;
1746{
1747  struct table_elt *elt, *next, *new;
1748
1749  /* Ensure we start with the head of the classes.  */
1750  class1 = class1->first_same_value;
1751  class2 = class2->first_same_value;
1752
1753  /* If they were already equal, forget it.  */
1754  if (class1 == class2)
1755    return;
1756
1757  for (elt = class2; elt; elt = next)
1758    {
1759      unsigned int hash;
1760      rtx exp = elt->exp;
1761      enum machine_mode mode = elt->mode;
1762
1763      next = elt->next_same_value;
1764
1765      /* Remove old entry, make a new one in CLASS1's class.
1766	 Don't do this for invalid entries as we cannot find their
1767	 hash code (it also isn't necessary).  */
1768      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1769	{
1770	  hash_arg_in_memory = 0;
1771	  hash = HASH (exp, mode);
1772
1773	  if (GET_CODE (exp) == REG)
1774	    delete_reg_equiv (REGNO (exp));
1775
1776	  remove_from_table (elt, hash);
1777
1778	  if (insert_regs (exp, class1, 0))
1779	    {
1780	      rehash_using_reg (exp);
1781	      hash = HASH (exp, mode);
1782	    }
1783	  new = insert (exp, class1, hash, mode);
1784	  new->in_memory = hash_arg_in_memory;
1785	}
1786    }
1787}
1788
1789/* Flush the entire hash table.  */
1790
1791static void
1792flush_hash_table ()
1793{
1794  int i;
1795  struct table_elt *p;
1796
1797  for (i = 0; i < HASH_SIZE; i++)
1798    for (p = table[i]; p; p = table[i])
1799      {
1800	/* Note that invalidate can remove elements
1801	   after P in the current hash chain.  */
1802	if (GET_CODE (p->exp) == REG)
1803	  invalidate (p->exp, p->mode);
1804	else
1805	  remove_from_table (p, i);
1806      }
1807}
1808
1809/* Function called for each rtx to check whether true dependence exist.  */
1810struct check_dependence_data
1811{
1812  enum machine_mode mode;
1813  rtx exp;
1814};
1815
1816static int
1817check_dependence (x, data)
1818     rtx *x;
1819     void *data;
1820{
1821  struct check_dependence_data *d = (struct check_dependence_data *) data;
1822  if (*x && GET_CODE (*x) == MEM)
1823    return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1824  else
1825    return 0;
1826}
1827
1828/* Remove from the hash table, or mark as invalid, all expressions whose
1829   values could be altered by storing in X.  X is a register, a subreg, or
1830   a memory reference with nonvarying address (because, when a memory
1831   reference with a varying address is stored in, all memory references are
1832   removed by invalidate_memory so specific invalidation is superfluous).
1833   FULL_MODE, if not VOIDmode, indicates that this much should be
1834   invalidated instead of just the amount indicated by the mode of X.  This
1835   is only used for bitfield stores into memory.
1836
1837   A nonvarying address may be just a register or just a symbol reference,
1838   or it may be either of those plus a numeric offset.  */
1839
1840static void
1841invalidate (x, full_mode)
1842     rtx x;
1843     enum machine_mode full_mode;
1844{
1845  int i;
1846  struct table_elt *p;
1847
1848  switch (GET_CODE (x))
1849    {
1850    case REG:
1851      {
1852	/* If X is a register, dependencies on its contents are recorded
1853	   through the qty number mechanism.  Just change the qty number of
1854	   the register, mark it as invalid for expressions that refer to it,
1855	   and remove it itself.  */
1856	unsigned int regno = REGNO (x);
1857	unsigned int hash = HASH (x, GET_MODE (x));
1858
1859	/* Remove REGNO from any quantity list it might be on and indicate
1860	   that its value might have changed.  If it is a pseudo, remove its
1861	   entry from the hash table.
1862
1863	   For a hard register, we do the first two actions above for any
1864	   additional hard registers corresponding to X.  Then, if any of these
1865	   registers are in the table, we must remove any REG entries that
1866	   overlap these registers.  */
1867
1868	delete_reg_equiv (regno);
1869	REG_TICK (regno)++;
1870
1871	if (regno >= FIRST_PSEUDO_REGISTER)
1872	  {
1873	    /* Because a register can be referenced in more than one mode,
1874	       we might have to remove more than one table entry.  */
1875	    struct table_elt *elt;
1876
1877	    while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1878	      remove_from_table (elt, hash);
1879	  }
1880	else
1881	  {
1882	    HOST_WIDE_INT in_table
1883	      = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1884	    unsigned int endregno
1885	      = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1886	    unsigned int tregno, tendregno, rn;
1887	    struct table_elt *p, *next;
1888
1889	    CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1890
1891	    for (rn = regno + 1; rn < endregno; rn++)
1892	      {
1893		in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1894		CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1895		delete_reg_equiv (rn);
1896		REG_TICK (rn)++;
1897	      }
1898
1899	    if (in_table)
1900	      for (hash = 0; hash < HASH_SIZE; hash++)
1901		for (p = table[hash]; p; p = next)
1902		  {
1903		    next = p->next_same_hash;
1904
1905		    if (GET_CODE (p->exp) != REG
1906			|| REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1907		      continue;
1908
1909		    tregno = REGNO (p->exp);
1910		    tendregno
1911		      = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1912		    if (tendregno > regno && tregno < endregno)
1913		      remove_from_table (p, hash);
1914		  }
1915	  }
1916      }
1917      return;
1918
1919    case SUBREG:
1920      invalidate (SUBREG_REG (x), VOIDmode);
1921      return;
1922
1923    case PARALLEL:
1924      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1925	invalidate (XVECEXP (x, 0, i), VOIDmode);
1926      return;
1927
1928    case EXPR_LIST:
1929      /* This is part of a disjoint return value; extract the location in
1930	 question ignoring the offset.  */
1931      invalidate (XEXP (x, 0), VOIDmode);
1932      return;
1933
1934    case MEM:
1935      /* Calculate the canonical version of X here so that
1936	 true_dependence doesn't generate new RTL for X on each call.  */
1937      x = canon_rtx (x);
1938
1939      /* Remove all hash table elements that refer to overlapping pieces of
1940	 memory.  */
1941      if (full_mode == VOIDmode)
1942	full_mode = GET_MODE (x);
1943
1944      for (i = 0; i < HASH_SIZE; i++)
1945	{
1946	  struct table_elt *next;
1947
1948	  for (p = table[i]; p; p = next)
1949	    {
1950	      next = p->next_same_hash;
1951	      if (p->in_memory)
1952		{
1953		  struct check_dependence_data d;
1954
1955		  /* Just canonicalize the expression once;
1956		     otherwise each time we call invalidate
1957		     true_dependence will canonicalize the
1958		     expression again.  */
1959		  if (!p->canon_exp)
1960		    p->canon_exp = canon_rtx (p->exp);
1961		  d.exp = x;
1962		  d.mode = full_mode;
1963		  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1964		    remove_from_table (p, i);
1965		}
1966	    }
1967	}
1968      return;
1969
1970    default:
1971      abort ();
1972    }
1973}
1974
1975/* Remove all expressions that refer to register REGNO,
1976   since they are already invalid, and we are about to
1977   mark that register valid again and don't want the old
1978   expressions to reappear as valid.  */
1979
1980static void
1981remove_invalid_refs (regno)
1982     unsigned int regno;
1983{
1984  unsigned int i;
1985  struct table_elt *p, *next;
1986
1987  for (i = 0; i < HASH_SIZE; i++)
1988    for (p = table[i]; p; p = next)
1989      {
1990	next = p->next_same_hash;
1991	if (GET_CODE (p->exp) != REG
1992	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
1993	  remove_from_table (p, i);
1994      }
1995}
1996
1997/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1998   and mode MODE.  */
1999static void
2000remove_invalid_subreg_refs (regno, offset, mode)
2001     unsigned int regno;
2002     unsigned int offset;
2003     enum machine_mode mode;
2004{
2005  unsigned int i;
2006  struct table_elt *p, *next;
2007  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2008
2009  for (i = 0; i < HASH_SIZE; i++)
2010    for (p = table[i]; p; p = next)
2011      {
2012	rtx exp = p->exp;
2013	next = p->next_same_hash;
2014
2015	if (GET_CODE (exp) != REG
2016	    && (GET_CODE (exp) != SUBREG
2017		|| GET_CODE (SUBREG_REG (exp)) != REG
2018		|| REGNO (SUBREG_REG (exp)) != regno
2019		|| (((SUBREG_BYTE (exp)
2020		      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2021		    && SUBREG_BYTE (exp) <= end))
2022	    && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
2023	  remove_from_table (p, i);
2024      }
2025}
2026
2027/* Recompute the hash codes of any valid entries in the hash table that
2028   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2029
2030   This is called when we make a jump equivalence.  */
2031
2032static void
2033rehash_using_reg (x)
2034     rtx x;
2035{
2036  unsigned int i;
2037  struct table_elt *p, *next;
2038  unsigned hash;
2039
2040  if (GET_CODE (x) == SUBREG)
2041    x = SUBREG_REG (x);
2042
2043  /* If X is not a register or if the register is known not to be in any
2044     valid entries in the table, we have no work to do.  */
2045
2046  if (GET_CODE (x) != REG
2047      || REG_IN_TABLE (REGNO (x)) < 0
2048      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2049    return;
2050
2051  /* Scan all hash chains looking for valid entries that mention X.
2052     If we find one and it is in the wrong hash chain, move it.  We can skip
2053     objects that are registers, since they are handled specially.  */
2054
2055  for (i = 0; i < HASH_SIZE; i++)
2056    for (p = table[i]; p; p = next)
2057      {
2058	next = p->next_same_hash;
2059	if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2060	    && exp_equiv_p (p->exp, p->exp, 1, 0)
2061	    && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2062	  {
2063	    if (p->next_same_hash)
2064	      p->next_same_hash->prev_same_hash = p->prev_same_hash;
2065
2066	    if (p->prev_same_hash)
2067	      p->prev_same_hash->next_same_hash = p->next_same_hash;
2068	    else
2069	      table[i] = p->next_same_hash;
2070
2071	    p->next_same_hash = table[hash];
2072	    p->prev_same_hash = 0;
2073	    if (table[hash])
2074	      table[hash]->prev_same_hash = p;
2075	    table[hash] = p;
2076	  }
2077      }
2078}
2079
2080/* Remove from the hash table any expression that is a call-clobbered
2081   register.  Also update their TICK values.  */
2082
2083static void
2084invalidate_for_call ()
2085{
2086  unsigned int regno, endregno;
2087  unsigned int i;
2088  unsigned hash;
2089  struct table_elt *p, *next;
2090  int in_table = 0;
2091
2092  /* Go through all the hard registers.  For each that is clobbered in
2093     a CALL_INSN, remove the register from quantity chains and update
2094     reg_tick if defined.  Also see if any of these registers is currently
2095     in the table.  */
2096
2097  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2098    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2099      {
2100	delete_reg_equiv (regno);
2101	if (REG_TICK (regno) >= 0)
2102	  REG_TICK (regno)++;
2103
2104	in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2105      }
2106
2107  /* In the case where we have no call-clobbered hard registers in the
2108     table, we are done.  Otherwise, scan the table and remove any
2109     entry that overlaps a call-clobbered register.  */
2110
2111  if (in_table)
2112    for (hash = 0; hash < HASH_SIZE; hash++)
2113      for (p = table[hash]; p; p = next)
2114	{
2115	  next = p->next_same_hash;
2116
2117	  if (GET_CODE (p->exp) != REG
2118	      || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2119	    continue;
2120
2121	  regno = REGNO (p->exp);
2122	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2123
2124	  for (i = regno; i < endregno; i++)
2125	    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2126	      {
2127		remove_from_table (p, hash);
2128		break;
2129	      }
2130	}
2131}
2132
2133/* Given an expression X of type CONST,
2134   and ELT which is its table entry (or 0 if it
2135   is not in the hash table),
2136   return an alternate expression for X as a register plus integer.
2137   If none can be found, return 0.  */
2138
2139static rtx
2140use_related_value (x, elt)
2141     rtx x;
2142     struct table_elt *elt;
2143{
2144  struct table_elt *relt = 0;
2145  struct table_elt *p, *q;
2146  HOST_WIDE_INT offset;
2147
2148  /* First, is there anything related known?
2149     If we have a table element, we can tell from that.
2150     Otherwise, must look it up.  */
2151
2152  if (elt != 0 && elt->related_value != 0)
2153    relt = elt;
2154  else if (elt == 0 && GET_CODE (x) == CONST)
2155    {
2156      rtx subexp = get_related_value (x);
2157      if (subexp != 0)
2158	relt = lookup (subexp,
2159		       safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2160		       GET_MODE (subexp));
2161    }
2162
2163  if (relt == 0)
2164    return 0;
2165
2166  /* Search all related table entries for one that has an
2167     equivalent register.  */
2168
2169  p = relt;
2170  while (1)
2171    {
2172      /* This loop is strange in that it is executed in two different cases.
2173	 The first is when X is already in the table.  Then it is searching
2174	 the RELATED_VALUE list of X's class (RELT).  The second case is when
2175	 X is not in the table.  Then RELT points to a class for the related
2176	 value.
2177
2178	 Ensure that, whatever case we are in, that we ignore classes that have
2179	 the same value as X.  */
2180
2181      if (rtx_equal_p (x, p->exp))
2182	q = 0;
2183      else
2184	for (q = p->first_same_value; q; q = q->next_same_value)
2185	  if (GET_CODE (q->exp) == REG)
2186	    break;
2187
2188      if (q)
2189	break;
2190
2191      p = p->related_value;
2192
2193      /* We went all the way around, so there is nothing to be found.
2194	 Alternatively, perhaps RELT was in the table for some other reason
2195	 and it has no related values recorded.  */
2196      if (p == relt || p == 0)
2197	break;
2198    }
2199
2200  if (q == 0)
2201    return 0;
2202
2203  offset = (get_integer_term (x) - get_integer_term (p->exp));
2204  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2205  return plus_constant (q->exp, offset);
2206}
2207
2208/* Hash a string.  Just add its bytes up.  */
2209static inline unsigned
2210canon_hash_string (ps)
2211     const char *ps;
2212{
2213  unsigned hash = 0;
2214  const unsigned char *p = (const unsigned char *)ps;
2215
2216  if (p)
2217    while (*p)
2218      hash += *p++;
2219
2220  return hash;
2221}
2222
2223/* Hash an rtx.  We are careful to make sure the value is never negative.
2224   Equivalent registers hash identically.
2225   MODE is used in hashing for CONST_INTs only;
2226   otherwise the mode of X is used.
2227
2228   Store 1 in do_not_record if any subexpression is volatile.
2229
2230   Store 1 in hash_arg_in_memory if X contains a MEM rtx
2231   which does not have the RTX_UNCHANGING_P bit set.
2232
2233   Note that cse_insn knows that the hash code of a MEM expression
2234   is just (int) MEM plus the hash code of the address.  */
2235
2236static unsigned
2237canon_hash (x, mode)
2238     rtx x;
2239     enum machine_mode mode;
2240{
2241  int i, j;
2242  unsigned hash = 0;
2243  enum rtx_code code;
2244  const char *fmt;
2245
2246  /* repeat is used to turn tail-recursion into iteration.  */
2247 repeat:
2248  if (x == 0)
2249    return hash;
2250
2251  code = GET_CODE (x);
2252  switch (code)
2253    {
2254    case REG:
2255      {
2256	unsigned int regno = REGNO (x);
2257	bool record;
2258
2259	/* On some machines, we can't record any non-fixed hard register,
2260	   because extending its life will cause reload problems.  We
2261	   consider ap, fp, sp, gp to be fixed for this purpose.
2262
2263	   We also consider CCmode registers to be fixed for this purpose;
2264	   failure to do so leads to failure to simplify 0<100 type of
2265	   conditionals.
2266
2267	   On all machines, we can't record any global registers.
2268	   Nor should we record any register that is in a small
2269	   class, as defined by CLASS_LIKELY_SPILLED_P.  */
2270
2271	if (regno >= FIRST_PSEUDO_REGISTER)
2272	  record = true;
2273	else if (x == frame_pointer_rtx
2274		 || x == hard_frame_pointer_rtx
2275		 || x == arg_pointer_rtx
2276		 || x == stack_pointer_rtx
2277		 || x == pic_offset_table_rtx)
2278	  record = true;
2279	else if (global_regs[regno])
2280	  record = false;
2281	else if (fixed_regs[regno])
2282	  record = true;
2283	else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2284	  record = true;
2285	else if (SMALL_REGISTER_CLASSES)
2286	  record = false;
2287	else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2288	  record = false;
2289	else
2290	  record = true;
2291
2292	if (!record)
2293	  {
2294	    do_not_record = 1;
2295	    return 0;
2296	  }
2297
2298	hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2299	return hash;
2300      }
2301
2302    /* We handle SUBREG of a REG specially because the underlying
2303       reg changes its hash value with every value change; we don't
2304       want to have to forget unrelated subregs when one subreg changes.  */
2305    case SUBREG:
2306      {
2307	if (GET_CODE (SUBREG_REG (x)) == REG)
2308	  {
2309	    hash += (((unsigned) SUBREG << 7)
2310		     + REGNO (SUBREG_REG (x))
2311		     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2312	    return hash;
2313	  }
2314	break;
2315      }
2316
2317    case CONST_INT:
2318      {
2319	unsigned HOST_WIDE_INT tem = INTVAL (x);
2320	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2321	return hash;
2322      }
2323
2324    case CONST_DOUBLE:
2325      /* This is like the general case, except that it only counts
2326	 the integers representing the constant.  */
2327      hash += (unsigned) code + (unsigned) GET_MODE (x);
2328      if (GET_MODE (x) != VOIDmode)
2329	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2330	  {
2331	    unsigned HOST_WIDE_INT tem = XWINT (x, i);
2332	    hash += tem;
2333	  }
2334      else
2335	hash += ((unsigned) CONST_DOUBLE_LOW (x)
2336		 + (unsigned) CONST_DOUBLE_HIGH (x));
2337      return hash;
2338
2339    case CONST_VECTOR:
2340      {
2341	int units;
2342	rtx elt;
2343
2344	units = CONST_VECTOR_NUNITS (x);
2345
2346	for (i = 0; i < units; ++i)
2347	  {
2348	    elt = CONST_VECTOR_ELT (x, i);
2349	    hash += canon_hash (elt, GET_MODE (elt));
2350	  }
2351
2352	return hash;
2353      }
2354
2355      /* Assume there is only one rtx object for any given label.  */
2356    case LABEL_REF:
2357      hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2358      return hash;
2359
2360    case SYMBOL_REF:
2361      hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2362      return hash;
2363
2364    case MEM:
2365      /* We don't record if marked volatile or if BLKmode since we don't
2366	 know the size of the move.  */
2367      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2368	{
2369	  do_not_record = 1;
2370	  return 0;
2371	}
2372      if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2373	{
2374	  hash_arg_in_memory = 1;
2375	}
2376      /* Now that we have already found this special case,
2377	 might as well speed it up as much as possible.  */
2378      hash += (unsigned) MEM;
2379      x = XEXP (x, 0);
2380      goto repeat;
2381
2382    case USE:
2383      /* A USE that mentions non-volatile memory needs special
2384	 handling since the MEM may be BLKmode which normally
2385	 prevents an entry from being made.  Pure calls are
2386	 marked by a USE which mentions BLKmode memory.  */
2387      if (GET_CODE (XEXP (x, 0)) == MEM
2388	  && ! MEM_VOLATILE_P (XEXP (x, 0)))
2389	{
2390	  hash += (unsigned)USE;
2391	  x = XEXP (x, 0);
2392
2393	  if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2394	    hash_arg_in_memory = 1;
2395
2396	  /* Now that we have already found this special case,
2397	     might as well speed it up as much as possible.  */
2398	  hash += (unsigned) MEM;
2399	  x = XEXP (x, 0);
2400	  goto repeat;
2401	}
2402      break;
2403
2404    case PRE_DEC:
2405    case PRE_INC:
2406    case POST_DEC:
2407    case POST_INC:
2408    case PRE_MODIFY:
2409    case POST_MODIFY:
2410    case PC:
2411    case CC0:
2412    case CALL:
2413    case UNSPEC_VOLATILE:
2414      do_not_record = 1;
2415      return 0;
2416
2417    case ASM_OPERANDS:
2418      if (MEM_VOLATILE_P (x))
2419	{
2420	  do_not_record = 1;
2421	  return 0;
2422	}
2423      else
2424	{
2425	  /* We don't want to take the filename and line into account.  */
2426	  hash += (unsigned) code + (unsigned) GET_MODE (x)
2427	    + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2428	    + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2429	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2430
2431	  if (ASM_OPERANDS_INPUT_LENGTH (x))
2432	    {
2433	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2434		{
2435		  hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2436				       GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2437			   + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2438						(x, i)));
2439		}
2440
2441	      hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2442	      x = ASM_OPERANDS_INPUT (x, 0);
2443	      mode = GET_MODE (x);
2444	      goto repeat;
2445	    }
2446
2447	  return hash;
2448	}
2449      break;
2450
2451    default:
2452      break;
2453    }
2454
2455  i = GET_RTX_LENGTH (code) - 1;
2456  hash += (unsigned) code + (unsigned) GET_MODE (x);
2457  fmt = GET_RTX_FORMAT (code);
2458  for (; i >= 0; i--)
2459    {
2460      if (fmt[i] == 'e')
2461	{
2462	  rtx tem = XEXP (x, i);
2463
2464	  /* If we are about to do the last recursive call
2465	     needed at this level, change it into iteration.
2466	     This function  is called enough to be worth it.  */
2467	  if (i == 0)
2468	    {
2469	      x = tem;
2470	      goto repeat;
2471	    }
2472	  hash += canon_hash (tem, 0);
2473	}
2474      else if (fmt[i] == 'E')
2475	for (j = 0; j < XVECLEN (x, i); j++)
2476	  hash += canon_hash (XVECEXP (x, i, j), 0);
2477      else if (fmt[i] == 's')
2478	hash += canon_hash_string (XSTR (x, i));
2479      else if (fmt[i] == 'i')
2480	{
2481	  unsigned tem = XINT (x, i);
2482	  hash += tem;
2483	}
2484      else if (fmt[i] == '0' || fmt[i] == 't')
2485	/* Unused.  */
2486	;
2487      else
2488	abort ();
2489    }
2490  return hash;
2491}
2492
2493/* Like canon_hash but with no side effects.  */
2494
2495static unsigned
2496safe_hash (x, mode)
2497     rtx x;
2498     enum machine_mode mode;
2499{
2500  int save_do_not_record = do_not_record;
2501  int save_hash_arg_in_memory = hash_arg_in_memory;
2502  unsigned hash = canon_hash (x, mode);
2503  hash_arg_in_memory = save_hash_arg_in_memory;
2504  do_not_record = save_do_not_record;
2505  return hash;
2506}
2507
2508/* Return 1 iff X and Y would canonicalize into the same thing,
2509   without actually constructing the canonicalization of either one.
2510   If VALIDATE is nonzero,
2511   we assume X is an expression being processed from the rtl
2512   and Y was found in the hash table.  We check register refs
2513   in Y for being marked as valid.
2514
2515   If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2516   that is known to be in the register.  Ordinarily, we don't allow them
2517   to match, because letting them match would cause unpredictable results
2518   in all the places that search a hash table chain for an equivalent
2519   for a given value.  A possible equivalent that has different structure
2520   has its hash code computed from different data.  Whether the hash code
2521   is the same as that of the given value is pure luck.  */
2522
2523static int
2524exp_equiv_p (x, y, validate, equal_values)
2525     rtx x, y;
2526     int validate;
2527     int equal_values;
2528{
2529  int i, j;
2530  enum rtx_code code;
2531  const char *fmt;
2532
2533  /* Note: it is incorrect to assume an expression is equivalent to itself
2534     if VALIDATE is nonzero.  */
2535  if (x == y && !validate)
2536    return 1;
2537  if (x == 0 || y == 0)
2538    return x == y;
2539
2540  code = GET_CODE (x);
2541  if (code != GET_CODE (y))
2542    {
2543      if (!equal_values)
2544	return 0;
2545
2546      /* If X is a constant and Y is a register or vice versa, they may be
2547	 equivalent.  We only have to validate if Y is a register.  */
2548      if (CONSTANT_P (x) && GET_CODE (y) == REG
2549	  && REGNO_QTY_VALID_P (REGNO (y)))
2550	{
2551	  int y_q = REG_QTY (REGNO (y));
2552	  struct qty_table_elem *y_ent = &qty_table[y_q];
2553
2554	  if (GET_MODE (y) == y_ent->mode
2555	      && rtx_equal_p (x, y_ent->const_rtx)
2556	      && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2557	    return 1;
2558	}
2559
2560      if (CONSTANT_P (y) && code == REG
2561	  && REGNO_QTY_VALID_P (REGNO (x)))
2562	{
2563	  int x_q = REG_QTY (REGNO (x));
2564	  struct qty_table_elem *x_ent = &qty_table[x_q];
2565
2566	  if (GET_MODE (x) == x_ent->mode
2567	      && rtx_equal_p (y, x_ent->const_rtx))
2568	    return 1;
2569	}
2570
2571      return 0;
2572    }
2573
2574  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2575  if (GET_MODE (x) != GET_MODE (y))
2576    return 0;
2577
2578  switch (code)
2579    {
2580    case PC:
2581    case CC0:
2582    case CONST_INT:
2583      return x == y;
2584
2585    case LABEL_REF:
2586      return XEXP (x, 0) == XEXP (y, 0);
2587
2588    case SYMBOL_REF:
2589      return XSTR (x, 0) == XSTR (y, 0);
2590
2591    case REG:
2592      {
2593	unsigned int regno = REGNO (y);
2594	unsigned int endregno
2595	  = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2596		     : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2597	unsigned int i;
2598
2599	/* If the quantities are not the same, the expressions are not
2600	   equivalent.  If there are and we are not to validate, they
2601	   are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2602
2603	if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2604	  return 0;
2605
2606	if (! validate)
2607	  return 1;
2608
2609	for (i = regno; i < endregno; i++)
2610	  if (REG_IN_TABLE (i) != REG_TICK (i))
2611	    return 0;
2612
2613	return 1;
2614      }
2615
2616    /*  For commutative operations, check both orders.  */
2617    case PLUS:
2618    case MULT:
2619    case AND:
2620    case IOR:
2621    case XOR:
2622    case NE:
2623    case EQ:
2624      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2625	       && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2626			       validate, equal_values))
2627	      || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2628			       validate, equal_values)
2629		  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2630				  validate, equal_values)));
2631
2632    case ASM_OPERANDS:
2633      /* We don't use the generic code below because we want to
2634	 disregard filename and line numbers.  */
2635
2636      /* A volatile asm isn't equivalent to any other.  */
2637      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2638	return 0;
2639
2640      if (GET_MODE (x) != GET_MODE (y)
2641	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2642	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2643		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2644	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2645	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2646	return 0;
2647
2648      if (ASM_OPERANDS_INPUT_LENGTH (x))
2649	{
2650	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2651	    if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2652			       ASM_OPERANDS_INPUT (y, i),
2653			       validate, equal_values)
2654		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2655			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2656	      return 0;
2657	}
2658
2659      return 1;
2660
2661    default:
2662      break;
2663    }
2664
2665  /* Compare the elements.  If any pair of corresponding elements
2666     fail to match, return 0 for the whole things.  */
2667
2668  fmt = GET_RTX_FORMAT (code);
2669  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2670    {
2671      switch (fmt[i])
2672	{
2673	case 'e':
2674	  if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2675	    return 0;
2676	  break;
2677
2678	case 'E':
2679	  if (XVECLEN (x, i) != XVECLEN (y, i))
2680	    return 0;
2681	  for (j = 0; j < XVECLEN (x, i); j++)
2682	    if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2683			       validate, equal_values))
2684	      return 0;
2685	  break;
2686
2687	case 's':
2688	  if (strcmp (XSTR (x, i), XSTR (y, i)))
2689	    return 0;
2690	  break;
2691
2692	case 'i':
2693	  if (XINT (x, i) != XINT (y, i))
2694	    return 0;
2695	  break;
2696
2697	case 'w':
2698	  if (XWINT (x, i) != XWINT (y, i))
2699	    return 0;
2700	  break;
2701
2702	case '0':
2703	case 't':
2704	  break;
2705
2706	default:
2707	  abort ();
2708	}
2709    }
2710
2711  return 1;
2712}
2713
2714/* Return 1 if X has a value that can vary even between two
2715   executions of the program.  0 means X can be compared reliably
2716   against certain constants or near-constants.  */
2717
2718static int
2719cse_rtx_varies_p (x, from_alias)
2720     rtx x;
2721     int from_alias;
2722{
2723  /* We need not check for X and the equivalence class being of the same
2724     mode because if X is equivalent to a constant in some mode, it
2725     doesn't vary in any mode.  */
2726
2727  if (GET_CODE (x) == REG
2728      && REGNO_QTY_VALID_P (REGNO (x)))
2729    {
2730      int x_q = REG_QTY (REGNO (x));
2731      struct qty_table_elem *x_ent = &qty_table[x_q];
2732
2733      if (GET_MODE (x) == x_ent->mode
2734	  && x_ent->const_rtx != NULL_RTX)
2735	return 0;
2736    }
2737
2738  if (GET_CODE (x) == PLUS
2739      && GET_CODE (XEXP (x, 1)) == CONST_INT
2740      && GET_CODE (XEXP (x, 0)) == REG
2741      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2742    {
2743      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2744      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2745
2746      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2747	  && x0_ent->const_rtx != NULL_RTX)
2748	return 0;
2749    }
2750
2751  /* This can happen as the result of virtual register instantiation, if
2752     the initial constant is too large to be a valid address.  This gives
2753     us a three instruction sequence, load large offset into a register,
2754     load fp minus a constant into a register, then a MEM which is the
2755     sum of the two `constant' registers.  */
2756  if (GET_CODE (x) == PLUS
2757      && GET_CODE (XEXP (x, 0)) == REG
2758      && GET_CODE (XEXP (x, 1)) == REG
2759      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2760      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2761    {
2762      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2763      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2764      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2765      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2766
2767      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2768	  && x0_ent->const_rtx != NULL_RTX
2769	  && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2770	  && x1_ent->const_rtx != NULL_RTX)
2771	return 0;
2772    }
2773
2774  return rtx_varies_p (x, from_alias);
2775}
2776
2777/* Canonicalize an expression:
2778   replace each register reference inside it
2779   with the "oldest" equivalent register.
2780
2781   If INSN is non-zero and we are replacing a pseudo with a hard register
2782   or vice versa, validate_change is used to ensure that INSN remains valid
2783   after we make our substitution.  The calls are made with IN_GROUP non-zero
2784   so apply_change_group must be called upon the outermost return from this
2785   function (unless INSN is zero).  The result of apply_change_group can
2786   generally be discarded since the changes we are making are optional.  */
2787
2788static rtx
2789canon_reg (x, insn)
2790     rtx x;
2791     rtx insn;
2792{
2793  int i;
2794  enum rtx_code code;
2795  const char *fmt;
2796
2797  if (x == 0)
2798    return x;
2799
2800  code = GET_CODE (x);
2801  switch (code)
2802    {
2803    case PC:
2804    case CC0:
2805    case CONST:
2806    case CONST_INT:
2807    case CONST_DOUBLE:
2808    case CONST_VECTOR:
2809    case SYMBOL_REF:
2810    case LABEL_REF:
2811    case ADDR_VEC:
2812    case ADDR_DIFF_VEC:
2813      return x;
2814
2815    case REG:
2816      {
2817	int first;
2818	int q;
2819	struct qty_table_elem *ent;
2820
2821	/* Never replace a hard reg, because hard regs can appear
2822	   in more than one machine mode, and we must preserve the mode
2823	   of each occurrence.  Also, some hard regs appear in
2824	   MEMs that are shared and mustn't be altered.  Don't try to
2825	   replace any reg that maps to a reg of class NO_REGS.  */
2826	if (REGNO (x) < FIRST_PSEUDO_REGISTER
2827	    || ! REGNO_QTY_VALID_P (REGNO (x)))
2828	  return x;
2829
2830	q = REG_QTY (REGNO (x));
2831	ent = &qty_table[q];
2832	first = ent->first_reg;
2833	return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2834		: REGNO_REG_CLASS (first) == NO_REGS ? x
2835		: gen_rtx_REG (ent->mode, first));
2836      }
2837
2838    default:
2839      break;
2840    }
2841
2842  fmt = GET_RTX_FORMAT (code);
2843  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2844    {
2845      int j;
2846
2847      if (fmt[i] == 'e')
2848	{
2849	  rtx new = canon_reg (XEXP (x, i), insn);
2850	  int insn_code;
2851
2852	  /* If replacing pseudo with hard reg or vice versa, ensure the
2853	     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2854	  if (insn != 0 && new != 0
2855	      && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2856	      && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2857		   != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2858		  || (insn_code = recog_memoized (insn)) < 0
2859		  || insn_data[insn_code].n_dups > 0))
2860	    validate_change (insn, &XEXP (x, i), new, 1);
2861	  else
2862	    XEXP (x, i) = new;
2863	}
2864      else if (fmt[i] == 'E')
2865	for (j = 0; j < XVECLEN (x, i); j++)
2866	  XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2867    }
2868
2869  return x;
2870}
2871
2872/* LOC is a location within INSN that is an operand address (the contents of
2873   a MEM).  Find the best equivalent address to use that is valid for this
2874   insn.
2875
2876   On most CISC machines, complicated address modes are costly, and rtx_cost
2877   is a good approximation for that cost.  However, most RISC machines have
2878   only a few (usually only one) memory reference formats.  If an address is
2879   valid at all, it is often just as cheap as any other address.  Hence, for
2880   RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2881   costs of various addresses.  For two addresses of equal cost, choose the one
2882   with the highest `rtx_cost' value as that has the potential of eliminating
2883   the most insns.  For equal costs, we choose the first in the equivalence
2884   class.  Note that we ignore the fact that pseudo registers are cheaper
2885   than hard registers here because we would also prefer the pseudo registers.
2886  */
2887
2888static void
2889find_best_addr (insn, loc, mode)
2890     rtx insn;
2891     rtx *loc;
2892     enum machine_mode mode;
2893{
2894  struct table_elt *elt;
2895  rtx addr = *loc;
2896#ifdef ADDRESS_COST
2897  struct table_elt *p;
2898  int found_better = 1;
2899#endif
2900  int save_do_not_record = do_not_record;
2901  int save_hash_arg_in_memory = hash_arg_in_memory;
2902  int addr_volatile;
2903  int regno;
2904  unsigned hash;
2905
2906  /* Do not try to replace constant addresses or addresses of local and
2907     argument slots.  These MEM expressions are made only once and inserted
2908     in many instructions, as well as being used to control symbol table
2909     output.  It is not safe to clobber them.
2910
2911     There are some uncommon cases where the address is already in a register
2912     for some reason, but we cannot take advantage of that because we have
2913     no easy way to unshare the MEM.  In addition, looking up all stack
2914     addresses is costly.  */
2915  if ((GET_CODE (addr) == PLUS
2916       && GET_CODE (XEXP (addr, 0)) == REG
2917       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2918       && (regno = REGNO (XEXP (addr, 0)),
2919	   regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2920	   || regno == ARG_POINTER_REGNUM))
2921      || (GET_CODE (addr) == REG
2922	  && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2923	      || regno == HARD_FRAME_POINTER_REGNUM
2924	      || regno == ARG_POINTER_REGNUM))
2925      || GET_CODE (addr) == ADDRESSOF
2926      || CONSTANT_ADDRESS_P (addr))
2927    return;
2928
2929  /* If this address is not simply a register, try to fold it.  This will
2930     sometimes simplify the expression.  Many simplifications
2931     will not be valid, but some, usually applying the associative rule, will
2932     be valid and produce better code.  */
2933  if (GET_CODE (addr) != REG)
2934    {
2935      rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2936      int addr_folded_cost = address_cost (folded, mode);
2937      int addr_cost = address_cost (addr, mode);
2938
2939      if ((addr_folded_cost < addr_cost
2940	   || (addr_folded_cost == addr_cost
2941	       /* ??? The rtx_cost comparison is left over from an older
2942		  version of this code.  It is probably no longer helpful.  */
2943	       && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2944		   || approx_reg_cost (folded) < approx_reg_cost (addr))))
2945	  && validate_change (insn, loc, folded, 0))
2946	addr = folded;
2947    }
2948
2949  /* If this address is not in the hash table, we can't look for equivalences
2950     of the whole address.  Also, ignore if volatile.  */
2951
2952  do_not_record = 0;
2953  hash = HASH (addr, Pmode);
2954  addr_volatile = do_not_record;
2955  do_not_record = save_do_not_record;
2956  hash_arg_in_memory = save_hash_arg_in_memory;
2957
2958  if (addr_volatile)
2959    return;
2960
2961  elt = lookup (addr, hash, Pmode);
2962
2963#ifndef ADDRESS_COST
2964  if (elt)
2965    {
2966      int our_cost = elt->cost;
2967
2968      /* Find the lowest cost below ours that works.  */
2969      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2970	if (elt->cost < our_cost
2971	    && (GET_CODE (elt->exp) == REG
2972		|| exp_equiv_p (elt->exp, elt->exp, 1, 0))
2973	    && validate_change (insn, loc,
2974				canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2975	  return;
2976    }
2977#else
2978
2979  if (elt)
2980    {
2981      /* We need to find the best (under the criteria documented above) entry
2982	 in the class that is valid.  We use the `flag' field to indicate
2983	 choices that were invalid and iterate until we can't find a better
2984	 one that hasn't already been tried.  */
2985
2986      for (p = elt->first_same_value; p; p = p->next_same_value)
2987	p->flag = 0;
2988
2989      while (found_better)
2990	{
2991	  int best_addr_cost = address_cost (*loc, mode);
2992	  int best_rtx_cost = (elt->cost + 1) >> 1;
2993	  int exp_cost;
2994	  struct table_elt *best_elt = elt;
2995
2996	  found_better = 0;
2997	  for (p = elt->first_same_value; p; p = p->next_same_value)
2998	    if (! p->flag)
2999	      {
3000		if ((GET_CODE (p->exp) == REG
3001		     || exp_equiv_p (p->exp, p->exp, 1, 0))
3002		    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
3003			|| (exp_cost == best_addr_cost
3004			    && ((p->cost + 1) >> 1) > best_rtx_cost)))
3005		  {
3006		    found_better = 1;
3007		    best_addr_cost = exp_cost;
3008		    best_rtx_cost = (p->cost + 1) >> 1;
3009		    best_elt = p;
3010		  }
3011	      }
3012
3013	  if (found_better)
3014	    {
3015	      if (validate_change (insn, loc,
3016				   canon_reg (copy_rtx (best_elt->exp),
3017					      NULL_RTX), 0))
3018		return;
3019	      else
3020		best_elt->flag = 1;
3021	    }
3022	}
3023    }
3024
3025  /* If the address is a binary operation with the first operand a register
3026     and the second a constant, do the same as above, but looking for
3027     equivalences of the register.  Then try to simplify before checking for
3028     the best address to use.  This catches a few cases:  First is when we
3029     have REG+const and the register is another REG+const.  We can often merge
3030     the constants and eliminate one insn and one register.  It may also be
3031     that a machine has a cheap REG+REG+const.  Finally, this improves the
3032     code on the Alpha for unaligned byte stores.  */
3033
3034  if (flag_expensive_optimizations
3035      && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3036	  || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3037      && GET_CODE (XEXP (*loc, 0)) == REG
3038      && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3039    {
3040      rtx c = XEXP (*loc, 1);
3041
3042      do_not_record = 0;
3043      hash = HASH (XEXP (*loc, 0), Pmode);
3044      do_not_record = save_do_not_record;
3045      hash_arg_in_memory = save_hash_arg_in_memory;
3046
3047      elt = lookup (XEXP (*loc, 0), hash, Pmode);
3048      if (elt == 0)
3049	return;
3050
3051      /* We need to find the best (under the criteria documented above) entry
3052	 in the class that is valid.  We use the `flag' field to indicate
3053	 choices that were invalid and iterate until we can't find a better
3054	 one that hasn't already been tried.  */
3055
3056      for (p = elt->first_same_value; p; p = p->next_same_value)
3057	p->flag = 0;
3058
3059      while (found_better)
3060	{
3061	  int best_addr_cost = address_cost (*loc, mode);
3062	  int best_rtx_cost = (COST (*loc) + 1) >> 1;
3063	  struct table_elt *best_elt = elt;
3064	  rtx best_rtx = *loc;
3065	  int count;
3066
3067	  /* This is at worst case an O(n^2) algorithm, so limit our search
3068	     to the first 32 elements on the list.  This avoids trouble
3069	     compiling code with very long basic blocks that can easily
3070	     call simplify_gen_binary so many times that we run out of
3071	     memory.  */
3072
3073	  found_better = 0;
3074	  for (p = elt->first_same_value, count = 0;
3075	       p && count < 32;
3076	       p = p->next_same_value, count++)
3077	    if (! p->flag
3078		&& (GET_CODE (p->exp) == REG
3079		    || exp_equiv_p (p->exp, p->exp, 1, 0)))
3080	      {
3081		rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3082					       p->exp, c);
3083		int new_cost;
3084		new_cost = address_cost (new, mode);
3085
3086		if (new_cost < best_addr_cost
3087		    || (new_cost == best_addr_cost
3088			&& (COST (new) + 1) >> 1 > best_rtx_cost))
3089		  {
3090		    found_better = 1;
3091		    best_addr_cost = new_cost;
3092		    best_rtx_cost = (COST (new) + 1) >> 1;
3093		    best_elt = p;
3094		    best_rtx = new;
3095		  }
3096	      }
3097
3098	  if (found_better)
3099	    {
3100	      if (validate_change (insn, loc,
3101				   canon_reg (copy_rtx (best_rtx),
3102					      NULL_RTX), 0))
3103		return;
3104	      else
3105		best_elt->flag = 1;
3106	    }
3107	}
3108    }
3109#endif
3110}
3111
3112/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3113   operation (EQ, NE, GT, etc.), follow it back through the hash table and
3114   what values are being compared.
3115
3116   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3117   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3118   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3119   compared to produce cc0.
3120
3121   The return value is the comparison operator and is either the code of
3122   A or the code corresponding to the inverse of the comparison.  */
3123
3124static enum rtx_code
3125find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3126     enum rtx_code code;
3127     rtx *parg1, *parg2;
3128     enum machine_mode *pmode1, *pmode2;
3129{
3130  rtx arg1, arg2;
3131
3132  arg1 = *parg1, arg2 = *parg2;
3133
3134  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3135
3136  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3137    {
3138      /* Set non-zero when we find something of interest.  */
3139      rtx x = 0;
3140      int reverse_code = 0;
3141      struct table_elt *p = 0;
3142
3143      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3144	 On machines with CC0, this is the only case that can occur, since
3145	 fold_rtx will return the COMPARE or item being compared with zero
3146	 when given CC0.  */
3147
3148      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3149	x = arg1;
3150
3151      /* If ARG1 is a comparison operator and CODE is testing for
3152	 STORE_FLAG_VALUE, get the inner arguments.  */
3153
3154      else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3155	{
3156	  if (code == NE
3157	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3158		  && code == LT && STORE_FLAG_VALUE == -1)
3159#ifdef FLOAT_STORE_FLAG_VALUE
3160	      || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3161		  && (REAL_VALUE_NEGATIVE
3162		      (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3163#endif
3164	      )
3165	    x = arg1;
3166	  else if (code == EQ
3167		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3168		       && code == GE && STORE_FLAG_VALUE == -1)
3169#ifdef FLOAT_STORE_FLAG_VALUE
3170		   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3171		       && (REAL_VALUE_NEGATIVE
3172			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3173#endif
3174		   )
3175	    x = arg1, reverse_code = 1;
3176	}
3177
3178      /* ??? We could also check for
3179
3180	 (ne (and (eq (...) (const_int 1))) (const_int 0))
3181
3182	 and related forms, but let's wait until we see them occurring.  */
3183
3184      if (x == 0)
3185	/* Look up ARG1 in the hash table and see if it has an equivalence
3186	   that lets us see what is being compared.  */
3187	p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3188		    GET_MODE (arg1));
3189      if (p)
3190	{
3191	  p = p->first_same_value;
3192
3193	  /* If what we compare is already known to be constant, that is as
3194	     good as it gets.
3195	     We need to break the loop in this case, because otherwise we
3196	     can have an infinite loop when looking at a reg that is known
3197	     to be a constant which is the same as a comparison of a reg
3198	     against zero which appears later in the insn stream, which in
3199	     turn is constant and the same as the comparison of the first reg
3200	     against zero...  */
3201	  if (p->is_const)
3202	    break;
3203	}
3204
3205      for (; p; p = p->next_same_value)
3206	{
3207	  enum machine_mode inner_mode = GET_MODE (p->exp);
3208
3209	  /* If the entry isn't valid, skip it.  */
3210	  if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3211	    continue;
3212
3213	  if (GET_CODE (p->exp) == COMPARE
3214	      /* Another possibility is that this machine has a compare insn
3215		 that includes the comparison code.  In that case, ARG1 would
3216		 be equivalent to a comparison operation that would set ARG1 to
3217		 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3218		 ORIG_CODE is the actual comparison being done; if it is an EQ,
3219		 we must reverse ORIG_CODE.  On machine with a negative value
3220		 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3221	      || ((code == NE
3222		   || (code == LT
3223		       && GET_MODE_CLASS (inner_mode) == MODE_INT
3224		       && (GET_MODE_BITSIZE (inner_mode)
3225			   <= HOST_BITS_PER_WIDE_INT)
3226		       && (STORE_FLAG_VALUE
3227			   & ((HOST_WIDE_INT) 1
3228			      << (GET_MODE_BITSIZE (inner_mode) - 1))))
3229#ifdef FLOAT_STORE_FLAG_VALUE
3230		   || (code == LT
3231		       && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3232		       && (REAL_VALUE_NEGATIVE
3233			   (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3234#endif
3235		   )
3236		  && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3237	    {
3238	      x = p->exp;
3239	      break;
3240	    }
3241	  else if ((code == EQ
3242		    || (code == GE
3243			&& GET_MODE_CLASS (inner_mode) == MODE_INT
3244			&& (GET_MODE_BITSIZE (inner_mode)
3245			    <= HOST_BITS_PER_WIDE_INT)
3246			&& (STORE_FLAG_VALUE
3247			    & ((HOST_WIDE_INT) 1
3248			       << (GET_MODE_BITSIZE (inner_mode) - 1))))
3249#ifdef FLOAT_STORE_FLAG_VALUE
3250		    || (code == GE
3251			&& GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3252		        && (REAL_VALUE_NEGATIVE
3253			    (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3254#endif
3255		    )
3256		   && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3257	    {
3258	      reverse_code = 1;
3259	      x = p->exp;
3260	      break;
3261	    }
3262
3263	  /* If this is fp + constant, the equivalent is a better operand since
3264	     it may let us predict the value of the comparison.  */
3265	  else if (NONZERO_BASE_PLUS_P (p->exp))
3266	    {
3267	      arg1 = p->exp;
3268	      continue;
3269	    }
3270	}
3271
3272      /* If we didn't find a useful equivalence for ARG1, we are done.
3273	 Otherwise, set up for the next iteration.  */
3274      if (x == 0)
3275	break;
3276
3277      /* If we need to reverse the comparison, make sure that that is
3278	 possible -- we can't necessarily infer the value of GE from LT
3279	 with floating-point operands.  */
3280      if (reverse_code)
3281	{
3282	  enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3283	  if (reversed == UNKNOWN)
3284	    break;
3285	  else code = reversed;
3286	}
3287      else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3288	code = GET_CODE (x);
3289      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3290    }
3291
3292  /* Return our results.  Return the modes from before fold_rtx
3293     because fold_rtx might produce const_int, and then it's too late.  */
3294  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3295  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3296
3297  return code;
3298}
3299
3300/* If X is a nontrivial arithmetic operation on an argument
3301   for which a constant value can be determined, return
3302   the result of operating on that value, as a constant.
3303   Otherwise, return X, possibly with one or more operands
3304   modified by recursive calls to this function.
3305
3306   If X is a register whose contents are known, we do NOT
3307   return those contents here.  equiv_constant is called to
3308   perform that task.
3309
3310   INSN is the insn that we may be modifying.  If it is 0, make a copy
3311   of X before modifying it.  */
3312
3313static rtx
3314fold_rtx (x, insn)
3315     rtx x;
3316     rtx insn;
3317{
3318  enum rtx_code code;
3319  enum machine_mode mode;
3320  const char *fmt;
3321  int i;
3322  rtx new = 0;
3323  int copied = 0;
3324  int must_swap = 0;
3325
3326  /* Folded equivalents of first two operands of X.  */
3327  rtx folded_arg0;
3328  rtx folded_arg1;
3329
3330  /* Constant equivalents of first three operands of X;
3331     0 when no such equivalent is known.  */
3332  rtx const_arg0;
3333  rtx const_arg1;
3334  rtx const_arg2;
3335
3336  /* The mode of the first operand of X.  We need this for sign and zero
3337     extends.  */
3338  enum machine_mode mode_arg0;
3339
3340  if (x == 0)
3341    return x;
3342
3343  mode = GET_MODE (x);
3344  code = GET_CODE (x);
3345  switch (code)
3346    {
3347    case CONST:
3348    case CONST_INT:
3349    case CONST_DOUBLE:
3350    case CONST_VECTOR:
3351    case SYMBOL_REF:
3352    case LABEL_REF:
3353    case REG:
3354      /* No use simplifying an EXPR_LIST
3355	 since they are used only for lists of args
3356	 in a function call's REG_EQUAL note.  */
3357    case EXPR_LIST:
3358      /* Changing anything inside an ADDRESSOF is incorrect; we don't
3359	 want to (e.g.,) make (addressof (const_int 0)) just because
3360	 the location is known to be zero.  */
3361    case ADDRESSOF:
3362      return x;
3363
3364#ifdef HAVE_cc0
3365    case CC0:
3366      return prev_insn_cc0;
3367#endif
3368
3369    case PC:
3370      /* If the next insn is a CODE_LABEL followed by a jump table,
3371	 PC's value is a LABEL_REF pointing to that label.  That
3372	 lets us fold switch statements on the VAX.  */
3373      if (insn && GET_CODE (insn) == JUMP_INSN)
3374	{
3375	  rtx next = next_nonnote_insn (insn);
3376
3377	  if (next && GET_CODE (next) == CODE_LABEL
3378	      && NEXT_INSN (next) != 0
3379	      && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3380	      && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3381		  || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3382	    return gen_rtx_LABEL_REF (Pmode, next);
3383	}
3384      break;
3385
3386    case SUBREG:
3387      /* See if we previously assigned a constant value to this SUBREG.  */
3388      if ((new = lookup_as_function (x, CONST_INT)) != 0
3389	  || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3390	return new;
3391
3392      /* If this is a paradoxical SUBREG, we have no idea what value the
3393	 extra bits would have.  However, if the operand is equivalent
3394	 to a SUBREG whose operand is the same as our mode, and all the
3395	 modes are within a word, we can just use the inner operand
3396	 because these SUBREGs just say how to treat the register.
3397
3398	 Similarly if we find an integer constant.  */
3399
3400      if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3401	{
3402	  enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3403	  struct table_elt *elt;
3404
3405	  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3406	      && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3407	      && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3408				imode)) != 0)
3409	    for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3410	      {
3411		if (CONSTANT_P (elt->exp)
3412		    && GET_MODE (elt->exp) == VOIDmode)
3413		  return elt->exp;
3414
3415		if (GET_CODE (elt->exp) == SUBREG
3416		    && GET_MODE (SUBREG_REG (elt->exp)) == mode
3417		    && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3418		  return copy_rtx (SUBREG_REG (elt->exp));
3419	      }
3420
3421	  return x;
3422	}
3423
3424      /* Fold SUBREG_REG.  If it changed, see if we can simplify the SUBREG.
3425	 We might be able to if the SUBREG is extracting a single word in an
3426	 integral mode or extracting the low part.  */
3427
3428      folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3429      const_arg0 = equiv_constant (folded_arg0);
3430      if (const_arg0)
3431	folded_arg0 = const_arg0;
3432
3433      if (folded_arg0 != SUBREG_REG (x))
3434	{
3435	  new = simplify_subreg (mode, folded_arg0,
3436				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3437	  if (new)
3438	    return new;
3439	}
3440
3441      /* If this is a narrowing SUBREG and our operand is a REG, see if
3442	 we can find an equivalence for REG that is an arithmetic operation
3443	 in a wider mode where both operands are paradoxical SUBREGs
3444	 from objects of our result mode.  In that case, we couldn't report
3445	 an equivalent value for that operation, since we don't know what the
3446	 extra bits will be.  But we can find an equivalence for this SUBREG
3447	 by folding that operation is the narrow mode.  This allows us to
3448	 fold arithmetic in narrow modes when the machine only supports
3449	 word-sized arithmetic.
3450
3451	 Also look for a case where we have a SUBREG whose operand is the
3452	 same as our result.  If both modes are smaller than a word, we
3453	 are simply interpreting a register in different modes and we
3454	 can use the inner value.  */
3455
3456      if (GET_CODE (folded_arg0) == REG
3457	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3458	  && subreg_lowpart_p (x))
3459	{
3460	  struct table_elt *elt;
3461
3462	  /* We can use HASH here since we know that canon_hash won't be
3463	     called.  */
3464	  elt = lookup (folded_arg0,
3465			HASH (folded_arg0, GET_MODE (folded_arg0)),
3466			GET_MODE (folded_arg0));
3467
3468	  if (elt)
3469	    elt = elt->first_same_value;
3470
3471	  for (; elt; elt = elt->next_same_value)
3472	    {
3473	      enum rtx_code eltcode = GET_CODE (elt->exp);
3474
3475	      /* Just check for unary and binary operations.  */
3476	      if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3477		  && GET_CODE (elt->exp) != SIGN_EXTEND
3478		  && GET_CODE (elt->exp) != ZERO_EXTEND
3479		  && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3480		  && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3481		  && (GET_MODE_CLASS (mode)
3482		      == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3483		{
3484		  rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3485
3486		  if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3487		    op0 = fold_rtx (op0, NULL_RTX);
3488
3489		  op0 = equiv_constant (op0);
3490		  if (op0)
3491		    new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3492						    op0, mode);
3493		}
3494	      else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3495			|| GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3496		       && eltcode != DIV && eltcode != MOD
3497		       && eltcode != UDIV && eltcode != UMOD
3498		       && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3499		       && eltcode != ROTATE && eltcode != ROTATERT
3500		       && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3501			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3502				== mode))
3503			   || CONSTANT_P (XEXP (elt->exp, 0)))
3504		       && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3505			    && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3506				== mode))
3507			   || CONSTANT_P (XEXP (elt->exp, 1))))
3508		{
3509		  rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3510		  rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3511
3512		  if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3513		    op0 = fold_rtx (op0, NULL_RTX);
3514
3515		  if (op0)
3516		    op0 = equiv_constant (op0);
3517
3518		  if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3519		    op1 = fold_rtx (op1, NULL_RTX);
3520
3521		  if (op1)
3522		    op1 = equiv_constant (op1);
3523
3524		  /* If we are looking for the low SImode part of
3525		     (ashift:DI c (const_int 32)), it doesn't work
3526		     to compute that in SImode, because a 32-bit shift
3527		     in SImode is unpredictable.  We know the value is 0.  */
3528		  if (op0 && op1
3529		      && GET_CODE (elt->exp) == ASHIFT
3530		      && GET_CODE (op1) == CONST_INT
3531		      && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3532		    {
3533		      if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3534
3535			/* If the count fits in the inner mode's width,
3536			   but exceeds the outer mode's width,
3537			   the value will get truncated to 0
3538			   by the subreg.  */
3539			new = const0_rtx;
3540		      else
3541			/* If the count exceeds even the inner mode's width,
3542			   don't fold this expression.  */
3543			new = 0;
3544		    }
3545		  else if (op0 && op1)
3546		    new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3547						     op0, op1);
3548		}
3549
3550	      else if (GET_CODE (elt->exp) == SUBREG
3551		       && GET_MODE (SUBREG_REG (elt->exp)) == mode
3552		       && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3553			   <= UNITS_PER_WORD)
3554		       && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3555		new = copy_rtx (SUBREG_REG (elt->exp));
3556
3557	      if (new)
3558		return new;
3559	    }
3560	}
3561
3562      return x;
3563
3564    case NOT:
3565    case NEG:
3566      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3567	 If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3568      new = lookup_as_function (XEXP (x, 0), code);
3569      if (new)
3570	return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3571      break;
3572
3573    case MEM:
3574      /* If we are not actually processing an insn, don't try to find the
3575	 best address.  Not only don't we care, but we could modify the
3576	 MEM in an invalid way since we have no insn to validate against.  */
3577      if (insn != 0)
3578	find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3579
3580      {
3581	/* Even if we don't fold in the insn itself,
3582	   we can safely do so here, in hopes of getting a constant.  */
3583	rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3584	rtx base = 0;
3585	HOST_WIDE_INT offset = 0;
3586
3587	if (GET_CODE (addr) == REG
3588	    && REGNO_QTY_VALID_P (REGNO (addr)))
3589	  {
3590	    int addr_q = REG_QTY (REGNO (addr));
3591	    struct qty_table_elem *addr_ent = &qty_table[addr_q];
3592
3593	    if (GET_MODE (addr) == addr_ent->mode
3594		&& addr_ent->const_rtx != NULL_RTX)
3595	      addr = addr_ent->const_rtx;
3596	  }
3597
3598	/* If address is constant, split it into a base and integer offset.  */
3599	if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3600	  base = addr;
3601	else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3602		 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3603	  {
3604	    base = XEXP (XEXP (addr, 0), 0);
3605	    offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3606	  }
3607	else if (GET_CODE (addr) == LO_SUM
3608		 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3609	  base = XEXP (addr, 1);
3610	else if (GET_CODE (addr) == ADDRESSOF)
3611	  return change_address (x, VOIDmode, addr);
3612
3613	/* If this is a constant pool reference, we can fold it into its
3614	   constant to allow better value tracking.  */
3615	if (base && GET_CODE (base) == SYMBOL_REF
3616	    && CONSTANT_POOL_ADDRESS_P (base))
3617	  {
3618	    rtx constant = get_pool_constant (base);
3619	    enum machine_mode const_mode = get_pool_mode (base);
3620	    rtx new;
3621
3622	    if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3623	      constant_pool_entries_cost = COST (constant);
3624
3625	    /* If we are loading the full constant, we have an equivalence.  */
3626	    if (offset == 0 && mode == const_mode)
3627	      return constant;
3628
3629	    /* If this actually isn't a constant (weird!), we can't do
3630	       anything.  Otherwise, handle the two most common cases:
3631	       extracting a word from a multi-word constant, and extracting
3632	       the low-order bits.  Other cases don't seem common enough to
3633	       worry about.  */
3634	    if (! CONSTANT_P (constant))
3635	      return x;
3636
3637	    if (GET_MODE_CLASS (mode) == MODE_INT
3638		&& GET_MODE_SIZE (mode) == UNITS_PER_WORD
3639		&& offset % UNITS_PER_WORD == 0
3640		&& (new = operand_subword (constant,
3641					   offset / UNITS_PER_WORD,
3642					   0, const_mode)) != 0)
3643	      return new;
3644
3645	    if (((BYTES_BIG_ENDIAN
3646		  && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3647		 || (! BYTES_BIG_ENDIAN && offset == 0))
3648		&& (new = gen_lowpart_if_possible (mode, constant)) != 0)
3649	      return new;
3650	  }
3651
3652	/* If this is a reference to a label at a known position in a jump
3653	   table, we also know its value.  */
3654	if (base && GET_CODE (base) == LABEL_REF)
3655	  {
3656	    rtx label = XEXP (base, 0);
3657	    rtx table_insn = NEXT_INSN (label);
3658
3659	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3660		&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3661	      {
3662		rtx table = PATTERN (table_insn);
3663
3664		if (offset >= 0
3665		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3666			< XVECLEN (table, 0)))
3667		  return XVECEXP (table, 0,
3668				  offset / GET_MODE_SIZE (GET_MODE (table)));
3669	      }
3670	    if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3671		&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3672	      {
3673		rtx table = PATTERN (table_insn);
3674
3675		if (offset >= 0
3676		    && (offset / GET_MODE_SIZE (GET_MODE (table))
3677			< XVECLEN (table, 1)))
3678		  {
3679		    offset /= GET_MODE_SIZE (GET_MODE (table));
3680		    new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3681					 XEXP (table, 0));
3682
3683		    if (GET_MODE (table) != Pmode)
3684		      new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3685
3686		    /* Indicate this is a constant.  This isn't a
3687		       valid form of CONST, but it will only be used
3688		       to fold the next insns and then discarded, so
3689		       it should be safe.
3690
3691		       Note this expression must be explicitly discarded,
3692		       by cse_insn, else it may end up in a REG_EQUAL note
3693		       and "escape" to cause problems elsewhere.  */
3694		    return gen_rtx_CONST (GET_MODE (new), new);
3695		  }
3696	      }
3697	  }
3698
3699	return x;
3700      }
3701
3702#ifdef NO_FUNCTION_CSE
3703    case CALL:
3704      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3705	return x;
3706      break;
3707#endif
3708
3709    case ASM_OPERANDS:
3710      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3711	validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3712			 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3713      break;
3714
3715    default:
3716      break;
3717    }
3718
3719  const_arg0 = 0;
3720  const_arg1 = 0;
3721  const_arg2 = 0;
3722  mode_arg0 = VOIDmode;
3723
3724  /* Try folding our operands.
3725     Then see which ones have constant values known.  */
3726
3727  fmt = GET_RTX_FORMAT (code);
3728  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3729    if (fmt[i] == 'e')
3730      {
3731	rtx arg = XEXP (x, i);
3732	rtx folded_arg = arg, const_arg = 0;
3733	enum machine_mode mode_arg = GET_MODE (arg);
3734	rtx cheap_arg, expensive_arg;
3735	rtx replacements[2];
3736	int j;
3737
3738	/* Most arguments are cheap, so handle them specially.  */
3739	switch (GET_CODE (arg))
3740	  {
3741	  case REG:
3742	    /* This is the same as calling equiv_constant; it is duplicated
3743	       here for speed.  */
3744	    if (REGNO_QTY_VALID_P (REGNO (arg)))
3745	      {
3746		int arg_q = REG_QTY (REGNO (arg));
3747		struct qty_table_elem *arg_ent = &qty_table[arg_q];
3748
3749		if (arg_ent->const_rtx != NULL_RTX
3750		    && GET_CODE (arg_ent->const_rtx) != REG
3751		    && GET_CODE (arg_ent->const_rtx) != PLUS)
3752		  const_arg
3753		    = gen_lowpart_if_possible (GET_MODE (arg),
3754					       arg_ent->const_rtx);
3755	      }
3756	    break;
3757
3758	  case CONST:
3759	  case CONST_INT:
3760	  case SYMBOL_REF:
3761	  case LABEL_REF:
3762	  case CONST_DOUBLE:
3763	  case CONST_VECTOR:
3764	    const_arg = arg;
3765	    break;
3766
3767#ifdef HAVE_cc0
3768	  case CC0:
3769	    folded_arg = prev_insn_cc0;
3770	    mode_arg = prev_insn_cc0_mode;
3771	    const_arg = equiv_constant (folded_arg);
3772	    break;
3773#endif
3774
3775	  default:
3776	    folded_arg = fold_rtx (arg, insn);
3777	    const_arg = equiv_constant (folded_arg);
3778	  }
3779
3780	/* For the first three operands, see if the operand
3781	   is constant or equivalent to a constant.  */
3782	switch (i)
3783	  {
3784	  case 0:
3785	    folded_arg0 = folded_arg;
3786	    const_arg0 = const_arg;
3787	    mode_arg0 = mode_arg;
3788	    break;
3789	  case 1:
3790	    folded_arg1 = folded_arg;
3791	    const_arg1 = const_arg;
3792	    break;
3793	  case 2:
3794	    const_arg2 = const_arg;
3795	    break;
3796	  }
3797
3798	/* Pick the least expensive of the folded argument and an
3799	   equivalent constant argument.  */
3800	if (const_arg == 0 || const_arg == folded_arg
3801	    || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3802	  cheap_arg = folded_arg, expensive_arg = const_arg;
3803	else
3804	  cheap_arg = const_arg, expensive_arg = folded_arg;
3805
3806	/* Try to replace the operand with the cheapest of the two
3807	   possibilities.  If it doesn't work and this is either of the first
3808	   two operands of a commutative operation, try swapping them.
3809	   If THAT fails, try the more expensive, provided it is cheaper
3810	   than what is already there.  */
3811
3812	if (cheap_arg == XEXP (x, i))
3813	  continue;
3814
3815	if (insn == 0 && ! copied)
3816	  {
3817	    x = copy_rtx (x);
3818	    copied = 1;
3819	  }
3820
3821	/* Order the replacements from cheapest to most expensive.  */
3822	replacements[0] = cheap_arg;
3823	replacements[1] = expensive_arg;
3824
3825	for (j = 0; j < 2 && replacements[j];  j++)
3826	  {
3827	    int old_cost = COST_IN (XEXP (x, i), code);
3828	    int new_cost = COST_IN (replacements[j], code);
3829
3830	    /* Stop if what existed before was cheaper.  Prefer constants
3831	       in the case of a tie.  */
3832	    if (new_cost > old_cost
3833		|| (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3834	      break;
3835
3836	    if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3837	      break;
3838
3839	    if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3840		|| code == LTGT || code == UNEQ || code == ORDERED
3841		|| code == UNORDERED)
3842	      {
3843		validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3844		validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3845
3846		if (apply_change_group ())
3847		  {
3848		    /* Swap them back to be invalid so that this loop can
3849		       continue and flag them to be swapped back later.  */
3850		    rtx tem;
3851
3852		    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3853				       XEXP (x, 1) = tem;
3854		    must_swap = 1;
3855		    break;
3856		  }
3857	      }
3858	  }
3859      }
3860
3861    else
3862      {
3863	if (fmt[i] == 'E')
3864	  /* Don't try to fold inside of a vector of expressions.
3865	     Doing nothing is harmless.  */
3866	  {;}
3867      }
3868
3869  /* If a commutative operation, place a constant integer as the second
3870     operand unless the first operand is also a constant integer.  Otherwise,
3871     place any constant second unless the first operand is also a constant.  */
3872
3873  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3874      || code == LTGT || code == UNEQ || code == ORDERED
3875      || code == UNORDERED)
3876    {
3877      if (must_swap || (const_arg0
3878	  		&& (const_arg1 == 0
3879	      		    || (GET_CODE (const_arg0) == CONST_INT
3880			        && GET_CODE (const_arg1) != CONST_INT))))
3881	{
3882	  rtx tem = XEXP (x, 0);
3883
3884	  if (insn == 0 && ! copied)
3885	    {
3886	      x = copy_rtx (x);
3887	      copied = 1;
3888	    }
3889
3890	  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3891	  validate_change (insn, &XEXP (x, 1), tem, 1);
3892	  if (apply_change_group ())
3893	    {
3894	      tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3895	      tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3896	    }
3897	}
3898    }
3899
3900  /* If X is an arithmetic operation, see if we can simplify it.  */
3901
3902  switch (GET_RTX_CLASS (code))
3903    {
3904    case '1':
3905      {
3906	int is_const = 0;
3907
3908	/* We can't simplify extension ops unless we know the
3909	   original mode.  */
3910	if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3911	    && mode_arg0 == VOIDmode)
3912	  break;
3913
3914	/* If we had a CONST, strip it off and put it back later if we
3915	   fold.  */
3916	if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3917	  is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3918
3919	new = simplify_unary_operation (code, mode,
3920					const_arg0 ? const_arg0 : folded_arg0,
3921					mode_arg0);
3922	if (new != 0 && is_const)
3923	  new = gen_rtx_CONST (mode, new);
3924      }
3925      break;
3926
3927    case '<':
3928      /* See what items are actually being compared and set FOLDED_ARG[01]
3929	 to those values and CODE to the actual comparison code.  If any are
3930	 constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3931	 do anything if both operands are already known to be constant.  */
3932
3933      if (const_arg0 == 0 || const_arg1 == 0)
3934	{
3935	  struct table_elt *p0, *p1;
3936	  rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3937	  enum machine_mode mode_arg1;
3938
3939#ifdef FLOAT_STORE_FLAG_VALUE
3940	  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3941	    {
3942	      true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3943		      (FLOAT_STORE_FLAG_VALUE (mode), mode));
3944	      false_rtx = CONST0_RTX (mode);
3945	    }
3946#endif
3947
3948	  code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3949				       &mode_arg0, &mode_arg1);
3950	  const_arg0 = equiv_constant (folded_arg0);
3951	  const_arg1 = equiv_constant (folded_arg1);
3952
3953	  /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3954	     what kinds of things are being compared, so we can't do
3955	     anything with this comparison.  */
3956
3957	  if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3958	    break;
3959
3960	  /* If we do not now have two constants being compared, see
3961	     if we can nevertheless deduce some things about the
3962	     comparison.  */
3963	  if (const_arg0 == 0 || const_arg1 == 0)
3964	    {
3965	      /* Is FOLDED_ARG0 frame-pointer plus a constant?  Or
3966		 non-explicit constant?  These aren't zero, but we
3967		 don't know their sign.  */
3968	      if (const_arg1 == const0_rtx
3969		  && (NONZERO_BASE_PLUS_P (folded_arg0)
3970#if 0  /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3971	  come out as 0.  */
3972		      || GET_CODE (folded_arg0) == SYMBOL_REF
3973#endif
3974		      || GET_CODE (folded_arg0) == LABEL_REF
3975		      || GET_CODE (folded_arg0) == CONST))
3976		{
3977		  if (code == EQ)
3978		    return false_rtx;
3979		  else if (code == NE)
3980		    return true_rtx;
3981		}
3982
3983	      /* See if the two operands are the same.  */
3984
3985	      if (folded_arg0 == folded_arg1
3986		  || (GET_CODE (folded_arg0) == REG
3987		      && GET_CODE (folded_arg1) == REG
3988		      && (REG_QTY (REGNO (folded_arg0))
3989			  == REG_QTY (REGNO (folded_arg1))))
3990		  || ((p0 = lookup (folded_arg0,
3991				    (safe_hash (folded_arg0, mode_arg0)
3992				     & HASH_MASK), mode_arg0))
3993		      && (p1 = lookup (folded_arg1,
3994				       (safe_hash (folded_arg1, mode_arg0)
3995					& HASH_MASK), mode_arg0))
3996		      && p0->first_same_value == p1->first_same_value))
3997		{
3998		   /* Sadly two equal NaNs are not equivalent.  */
3999		   if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4000		       || ! FLOAT_MODE_P (mode_arg0)
4001		       || flag_unsafe_math_optimizations)
4002		      return ((code == EQ || code == LE || code == GE
4003			       || code == LEU || code == GEU || code == UNEQ
4004			       || code == UNLE || code == UNGE || code == ORDERED)
4005			      ? true_rtx : false_rtx);
4006		   /* Take care for the FP compares we can resolve.  */
4007		   if (code == UNEQ || code == UNLE || code == UNGE)
4008		     return true_rtx;
4009		   if (code == LTGT || code == LT || code == GT)
4010		     return false_rtx;
4011		}
4012
4013	      /* If FOLDED_ARG0 is a register, see if the comparison we are
4014		 doing now is either the same as we did before or the reverse
4015		 (we only check the reverse if not floating-point).  */
4016	      else if (GET_CODE (folded_arg0) == REG)
4017		{
4018		  int qty = REG_QTY (REGNO (folded_arg0));
4019
4020		  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4021		    {
4022		      struct qty_table_elem *ent = &qty_table[qty];
4023
4024		      if ((comparison_dominates_p (ent->comparison_code, code)
4025			   || (! FLOAT_MODE_P (mode_arg0)
4026			       && comparison_dominates_p (ent->comparison_code,
4027						          reverse_condition (code))))
4028			  && (rtx_equal_p (ent->comparison_const, folded_arg1)
4029			      || (const_arg1
4030				  && rtx_equal_p (ent->comparison_const,
4031						  const_arg1))
4032			      || (GET_CODE (folded_arg1) == REG
4033				  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4034			return (comparison_dominates_p (ent->comparison_code, code)
4035				? true_rtx : false_rtx);
4036		    }
4037		}
4038	    }
4039	}
4040
4041      /* If we are comparing against zero, see if the first operand is
4042	 equivalent to an IOR with a constant.  If so, we may be able to
4043	 determine the result of this comparison.  */
4044
4045      if (const_arg1 == const0_rtx)
4046	{
4047	  rtx y = lookup_as_function (folded_arg0, IOR);
4048	  rtx inner_const;
4049
4050	  if (y != 0
4051	      && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4052	      && GET_CODE (inner_const) == CONST_INT
4053	      && INTVAL (inner_const) != 0)
4054	    {
4055	      int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4056	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4057			      && (INTVAL (inner_const)
4058				  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4059	      rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4060
4061#ifdef FLOAT_STORE_FLAG_VALUE
4062	      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4063		{
4064		  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4065			  (FLOAT_STORE_FLAG_VALUE (mode), mode));
4066		  false_rtx = CONST0_RTX (mode);
4067		}
4068#endif
4069
4070	      switch (code)
4071		{
4072		case EQ:
4073		  return false_rtx;
4074		case NE:
4075		  return true_rtx;
4076		case LT:  case LE:
4077		  if (has_sign)
4078		    return true_rtx;
4079		  break;
4080		case GT:  case GE:
4081		  if (has_sign)
4082		    return false_rtx;
4083		  break;
4084		default:
4085		  break;
4086		}
4087	    }
4088	}
4089
4090      new = simplify_relational_operation (code,
4091					   (mode_arg0 != VOIDmode
4092					    ? mode_arg0
4093					    : (GET_MODE (const_arg0
4094							 ? const_arg0
4095							 : folded_arg0)
4096					       != VOIDmode)
4097					    ? GET_MODE (const_arg0
4098							? const_arg0
4099							: folded_arg0)
4100					    : GET_MODE (const_arg1
4101							? const_arg1
4102							: folded_arg1)),
4103					   const_arg0 ? const_arg0 : folded_arg0,
4104					   const_arg1 ? const_arg1 : folded_arg1);
4105#ifdef FLOAT_STORE_FLAG_VALUE
4106      if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4107	{
4108	  if (new == const0_rtx)
4109	    new = CONST0_RTX (mode);
4110	  else
4111	    new = (CONST_DOUBLE_FROM_REAL_VALUE
4112		   (FLOAT_STORE_FLAG_VALUE (mode), mode));
4113	}
4114#endif
4115      break;
4116
4117    case '2':
4118    case 'c':
4119      switch (code)
4120	{
4121	case PLUS:
4122	  /* If the second operand is a LABEL_REF, see if the first is a MINUS
4123	     with that LABEL_REF as its second operand.  If so, the result is
4124	     the first operand of that MINUS.  This handles switches with an
4125	     ADDR_DIFF_VEC table.  */
4126	  if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4127	    {
4128	      rtx y
4129		= GET_CODE (folded_arg0) == MINUS ? folded_arg0
4130		: lookup_as_function (folded_arg0, MINUS);
4131
4132	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4133		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4134		return XEXP (y, 0);
4135
4136	      /* Now try for a CONST of a MINUS like the above.  */
4137	      if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4138			: lookup_as_function (folded_arg0, CONST))) != 0
4139		  && GET_CODE (XEXP (y, 0)) == MINUS
4140		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4141		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4142		return XEXP (XEXP (y, 0), 0);
4143	    }
4144
4145	  /* Likewise if the operands are in the other order.  */
4146	  if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4147	    {
4148	      rtx y
4149		= GET_CODE (folded_arg1) == MINUS ? folded_arg1
4150		: lookup_as_function (folded_arg1, MINUS);
4151
4152	      if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4153		  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4154		return XEXP (y, 0);
4155
4156	      /* Now try for a CONST of a MINUS like the above.  */
4157	      if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4158			: lookup_as_function (folded_arg1, CONST))) != 0
4159		  && GET_CODE (XEXP (y, 0)) == MINUS
4160		  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4161		  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4162		return XEXP (XEXP (y, 0), 0);
4163	    }
4164
4165	  /* If second operand is a register equivalent to a negative
4166	     CONST_INT, see if we can find a register equivalent to the
4167	     positive constant.  Make a MINUS if so.  Don't do this for
4168	     a non-negative constant since we might then alternate between
4169	     choosing positive and negative constants.  Having the positive
4170	     constant previously-used is the more common case.  Be sure
4171	     the resulting constant is non-negative; if const_arg1 were
4172	     the smallest negative number this would overflow: depending
4173	     on the mode, this would either just be the same value (and
4174	     hence not save anything) or be incorrect.  */
4175	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4176	      && INTVAL (const_arg1) < 0
4177	      /* This used to test
4178
4179	         -INTVAL (const_arg1) >= 0
4180
4181		 But The Sun V5.0 compilers mis-compiled that test.  So
4182		 instead we test for the problematic value in a more direct
4183		 manner and hope the Sun compilers get it correct.  */
4184	      && INTVAL (const_arg1) !=
4185	        ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4186	      && GET_CODE (folded_arg1) == REG)
4187	    {
4188	      rtx new_const = GEN_INT (-INTVAL (const_arg1));
4189	      struct table_elt *p
4190		= lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4191			  mode);
4192
4193	      if (p)
4194		for (p = p->first_same_value; p; p = p->next_same_value)
4195		  if (GET_CODE (p->exp) == REG)
4196		    return simplify_gen_binary (MINUS, mode, folded_arg0,
4197						canon_reg (p->exp, NULL_RTX));
4198	    }
4199	  goto from_plus;
4200
4201	case MINUS:
4202	  /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4203	     If so, produce (PLUS Z C2-C).  */
4204	  if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4205	    {
4206	      rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4207	      if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4208		return fold_rtx (plus_constant (copy_rtx (y),
4209						-INTVAL (const_arg1)),
4210				 NULL_RTX);
4211	    }
4212
4213	  /* Fall through.  */
4214
4215	from_plus:
4216	case SMIN:    case SMAX:      case UMIN:    case UMAX:
4217	case IOR:     case AND:       case XOR:
4218	case MULT:    case DIV:       case UDIV:
4219	case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4220	  /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4221	     is known to be of similar form, we may be able to replace the
4222	     operation with a combined operation.  This may eliminate the
4223	     intermediate operation if every use is simplified in this way.
4224	     Note that the similar optimization done by combine.c only works
4225	     if the intermediate operation's result has only one reference.  */
4226
4227	  if (GET_CODE (folded_arg0) == REG
4228	      && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4229	    {
4230	      int is_shift
4231		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4232	      rtx y = lookup_as_function (folded_arg0, code);
4233	      rtx inner_const;
4234	      enum rtx_code associate_code;
4235	      rtx new_const;
4236
4237	      if (y == 0
4238		  || 0 == (inner_const
4239			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4240		  || GET_CODE (inner_const) != CONST_INT
4241		  /* If we have compiled a statement like
4242		     "if (x == (x & mask1))", and now are looking at
4243		     "x & mask2", we will have a case where the first operand
4244		     of Y is the same as our first operand.  Unless we detect
4245		     this case, an infinite loop will result.  */
4246		  || XEXP (y, 0) == folded_arg0)
4247		break;
4248
4249	      /* Don't associate these operations if they are a PLUS with the
4250		 same constant and it is a power of two.  These might be doable
4251		 with a pre- or post-increment.  Similarly for two subtracts of
4252		 identical powers of two with post decrement.  */
4253
4254	      if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4255		  && ((HAVE_PRE_INCREMENT
4256			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4257		      || (HAVE_POST_INCREMENT
4258			  && exact_log2 (INTVAL (const_arg1)) >= 0)
4259		      || (HAVE_PRE_DECREMENT
4260			  && exact_log2 (- INTVAL (const_arg1)) >= 0)
4261		      || (HAVE_POST_DECREMENT
4262			  && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4263		break;
4264
4265	      /* Compute the code used to compose the constants.  For example,
4266		 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT.  */
4267
4268	      associate_code
4269		= (code == MULT || code == DIV || code == UDIV ? MULT
4270		   : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4271
4272	      new_const = simplify_binary_operation (associate_code, mode,
4273						     const_arg1, inner_const);
4274
4275	      if (new_const == 0)
4276		break;
4277
4278	      /* If we are associating shift operations, don't let this
4279		 produce a shift of the size of the object or larger.
4280		 This could occur when we follow a sign-extend by a right
4281		 shift on a machine that does a sign-extend as a pair
4282		 of shifts.  */
4283
4284	      if (is_shift && GET_CODE (new_const) == CONST_INT
4285		  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4286		{
4287		  /* As an exception, we can turn an ASHIFTRT of this
4288		     form into a shift of the number of bits - 1.  */
4289		  if (code == ASHIFTRT)
4290		    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4291		  else
4292		    break;
4293		}
4294
4295	      y = copy_rtx (XEXP (y, 0));
4296
4297	      /* If Y contains our first operand (the most common way this
4298		 can happen is if Y is a MEM), we would do into an infinite
4299		 loop if we tried to fold it.  So don't in that case.  */
4300
4301	      if (! reg_mentioned_p (folded_arg0, y))
4302		y = fold_rtx (y, insn);
4303
4304	      return simplify_gen_binary (code, mode, y, new_const);
4305	    }
4306	  break;
4307
4308	default:
4309	  break;
4310	}
4311
4312      new = simplify_binary_operation (code, mode,
4313				       const_arg0 ? const_arg0 : folded_arg0,
4314				       const_arg1 ? const_arg1 : folded_arg1);
4315      break;
4316
4317    case 'o':
4318      /* (lo_sum (high X) X) is simply X.  */
4319      if (code == LO_SUM && const_arg0 != 0
4320	  && GET_CODE (const_arg0) == HIGH
4321	  && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4322	return const_arg1;
4323      break;
4324
4325    case '3':
4326    case 'b':
4327      new = simplify_ternary_operation (code, mode, mode_arg0,
4328					const_arg0 ? const_arg0 : folded_arg0,
4329					const_arg1 ? const_arg1 : folded_arg1,
4330					const_arg2 ? const_arg2 : XEXP (x, 2));
4331      break;
4332
4333    case 'x':
4334      /* Always eliminate CONSTANT_P_RTX at this stage.  */
4335      if (code == CONSTANT_P_RTX)
4336	return (const_arg0 ? const1_rtx : const0_rtx);
4337      break;
4338    }
4339
4340  return new ? new : x;
4341}
4342
4343/* Return a constant value currently equivalent to X.
4344   Return 0 if we don't know one.  */
4345
4346static rtx
4347equiv_constant (x)
4348     rtx x;
4349{
4350  if (GET_CODE (x) == REG
4351      && REGNO_QTY_VALID_P (REGNO (x)))
4352    {
4353      int x_q = REG_QTY (REGNO (x));
4354      struct qty_table_elem *x_ent = &qty_table[x_q];
4355
4356      if (x_ent->const_rtx)
4357	x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4358    }
4359
4360  if (x == 0 || CONSTANT_P (x))
4361    return x;
4362
4363  /* If X is a MEM, try to fold it outside the context of any insn to see if
4364     it might be equivalent to a constant.  That handles the case where it
4365     is a constant-pool reference.  Then try to look it up in the hash table
4366     in case it is something whose value we have seen before.  */
4367
4368  if (GET_CODE (x) == MEM)
4369    {
4370      struct table_elt *elt;
4371
4372      x = fold_rtx (x, NULL_RTX);
4373      if (CONSTANT_P (x))
4374	return x;
4375
4376      elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4377      if (elt == 0)
4378	return 0;
4379
4380      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4381	if (elt->is_const && CONSTANT_P (elt->exp))
4382	  return elt->exp;
4383    }
4384
4385  return 0;
4386}
4387
4388/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4389   number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4390   least-significant part of X.
4391   MODE specifies how big a part of X to return.
4392
4393   If the requested operation cannot be done, 0 is returned.
4394
4395   This is similar to gen_lowpart in emit-rtl.c.  */
4396
4397rtx
4398gen_lowpart_if_possible (mode, x)
4399     enum machine_mode mode;
4400     rtx x;
4401{
4402  rtx result = gen_lowpart_common (mode, x);
4403
4404  if (result)
4405    return result;
4406  else if (GET_CODE (x) == MEM)
4407    {
4408      /* This is the only other case we handle.  */
4409      int offset = 0;
4410      rtx new;
4411
4412      if (WORDS_BIG_ENDIAN)
4413	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4414		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4415      if (BYTES_BIG_ENDIAN)
4416	/* Adjust the address so that the address-after-the-data is
4417	   unchanged.  */
4418	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4419		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4420
4421      new = adjust_address_nv (x, mode, offset);
4422      if (! memory_address_p (mode, XEXP (new, 0)))
4423	return 0;
4424
4425      return new;
4426    }
4427  else
4428    return 0;
4429}
4430
4431/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4432   branch.  It will be zero if not.
4433
4434   In certain cases, this can cause us to add an equivalence.  For example,
4435   if we are following the taken case of
4436   	if (i == 2)
4437   we can add the fact that `i' and '2' are now equivalent.
4438
4439   In any case, we can record that this comparison was passed.  If the same
4440   comparison is seen later, we will know its value.  */
4441
4442static void
4443record_jump_equiv (insn, taken)
4444     rtx insn;
4445     int taken;
4446{
4447  int cond_known_true;
4448  rtx op0, op1;
4449  rtx set;
4450  enum machine_mode mode, mode0, mode1;
4451  int reversed_nonequality = 0;
4452  enum rtx_code code;
4453
4454  /* Ensure this is the right kind of insn.  */
4455  if (! any_condjump_p (insn))
4456    return;
4457  set = pc_set (insn);
4458
4459  /* See if this jump condition is known true or false.  */
4460  if (taken)
4461    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4462  else
4463    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4464
4465  /* Get the type of comparison being done and the operands being compared.
4466     If we had to reverse a non-equality condition, record that fact so we
4467     know that it isn't valid for floating-point.  */
4468  code = GET_CODE (XEXP (SET_SRC (set), 0));
4469  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4470  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4471
4472  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4473  if (! cond_known_true)
4474    {
4475      code = reversed_comparison_code_parts (code, op0, op1, insn);
4476
4477      /* Don't remember if we can't find the inverse.  */
4478      if (code == UNKNOWN)
4479	return;
4480    }
4481
4482  /* The mode is the mode of the non-constant.  */
4483  mode = mode0;
4484  if (mode1 != VOIDmode)
4485    mode = mode1;
4486
4487  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4488}
4489
4490/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4491   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4492   Make any useful entries we can with that information.  Called from
4493   above function and called recursively.  */
4494
4495static void
4496record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4497     enum rtx_code code;
4498     enum machine_mode mode;
4499     rtx op0, op1;
4500     int reversed_nonequality;
4501{
4502  unsigned op0_hash, op1_hash;
4503  int op0_in_memory, op1_in_memory;
4504  struct table_elt *op0_elt, *op1_elt;
4505
4506  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4507     we know that they are also equal in the smaller mode (this is also
4508     true for all smaller modes whether or not there is a SUBREG, but
4509     is not worth testing for with no SUBREG).  */
4510
4511  /* Note that GET_MODE (op0) may not equal MODE.  */
4512  if (code == EQ && GET_CODE (op0) == SUBREG
4513      && (GET_MODE_SIZE (GET_MODE (op0))
4514	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4515    {
4516      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4517      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4518
4519      record_jump_cond (code, mode, SUBREG_REG (op0),
4520			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4521			reversed_nonequality);
4522    }
4523
4524  if (code == EQ && GET_CODE (op1) == SUBREG
4525      && (GET_MODE_SIZE (GET_MODE (op1))
4526	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4527    {
4528      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4529      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4530
4531      record_jump_cond (code, mode, SUBREG_REG (op1),
4532			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4533			reversed_nonequality);
4534    }
4535
4536  /* Similarly, if this is an NE comparison, and either is a SUBREG
4537     making a smaller mode, we know the whole thing is also NE.  */
4538
4539  /* Note that GET_MODE (op0) may not equal MODE;
4540     if we test MODE instead, we can get an infinite recursion
4541     alternating between two modes each wider than MODE.  */
4542
4543  if (code == NE && GET_CODE (op0) == SUBREG
4544      && subreg_lowpart_p (op0)
4545      && (GET_MODE_SIZE (GET_MODE (op0))
4546	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4547    {
4548      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4549      rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4550
4551      record_jump_cond (code, mode, SUBREG_REG (op0),
4552			tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4553			reversed_nonequality);
4554    }
4555
4556  if (code == NE && GET_CODE (op1) == SUBREG
4557      && subreg_lowpart_p (op1)
4558      && (GET_MODE_SIZE (GET_MODE (op1))
4559	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4560    {
4561      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4562      rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4563
4564      record_jump_cond (code, mode, SUBREG_REG (op1),
4565			tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4566			reversed_nonequality);
4567    }
4568
4569  /* Hash both operands.  */
4570
4571  do_not_record = 0;
4572  hash_arg_in_memory = 0;
4573  op0_hash = HASH (op0, mode);
4574  op0_in_memory = hash_arg_in_memory;
4575
4576  if (do_not_record)
4577    return;
4578
4579  do_not_record = 0;
4580  hash_arg_in_memory = 0;
4581  op1_hash = HASH (op1, mode);
4582  op1_in_memory = hash_arg_in_memory;
4583
4584  if (do_not_record)
4585    return;
4586
4587  /* Look up both operands.  */
4588  op0_elt = lookup (op0, op0_hash, mode);
4589  op1_elt = lookup (op1, op1_hash, mode);
4590
4591  /* If both operands are already equivalent or if they are not in the
4592     table but are identical, do nothing.  */
4593  if ((op0_elt != 0 && op1_elt != 0
4594       && op0_elt->first_same_value == op1_elt->first_same_value)
4595      || op0 == op1 || rtx_equal_p (op0, op1))
4596    return;
4597
4598  /* If we aren't setting two things equal all we can do is save this
4599     comparison.   Similarly if this is floating-point.  In the latter
4600     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4601     If we record the equality, we might inadvertently delete code
4602     whose intent was to change -0 to +0.  */
4603
4604  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4605    {
4606      struct qty_table_elem *ent;
4607      int qty;
4608
4609      /* If we reversed a floating-point comparison, if OP0 is not a
4610	 register, or if OP1 is neither a register or constant, we can't
4611	 do anything.  */
4612
4613      if (GET_CODE (op1) != REG)
4614	op1 = equiv_constant (op1);
4615
4616      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4617	  || GET_CODE (op0) != REG || op1 == 0)
4618	return;
4619
4620      /* Put OP0 in the hash table if it isn't already.  This gives it a
4621	 new quantity number.  */
4622      if (op0_elt == 0)
4623	{
4624	  if (insert_regs (op0, NULL, 0))
4625	    {
4626	      rehash_using_reg (op0);
4627	      op0_hash = HASH (op0, mode);
4628
4629	      /* If OP0 is contained in OP1, this changes its hash code
4630		 as well.  Faster to rehash than to check, except
4631		 for the simple case of a constant.  */
4632	      if (! CONSTANT_P (op1))
4633		op1_hash = HASH (op1,mode);
4634	    }
4635
4636	  op0_elt = insert (op0, NULL, op0_hash, mode);
4637	  op0_elt->in_memory = op0_in_memory;
4638	}
4639
4640      qty = REG_QTY (REGNO (op0));
4641      ent = &qty_table[qty];
4642
4643      ent->comparison_code = code;
4644      if (GET_CODE (op1) == REG)
4645	{
4646	  /* Look it up again--in case op0 and op1 are the same.  */
4647	  op1_elt = lookup (op1, op1_hash, mode);
4648
4649	  /* Put OP1 in the hash table so it gets a new quantity number.  */
4650	  if (op1_elt == 0)
4651	    {
4652	      if (insert_regs (op1, NULL, 0))
4653		{
4654		  rehash_using_reg (op1);
4655		  op1_hash = HASH (op1, mode);
4656		}
4657
4658	      op1_elt = insert (op1, NULL, op1_hash, mode);
4659	      op1_elt->in_memory = op1_in_memory;
4660	    }
4661
4662	  ent->comparison_const = NULL_RTX;
4663	  ent->comparison_qty = REG_QTY (REGNO (op1));
4664	}
4665      else
4666	{
4667	  ent->comparison_const = op1;
4668	  ent->comparison_qty = -1;
4669	}
4670
4671      return;
4672    }
4673
4674  /* If either side is still missing an equivalence, make it now,
4675     then merge the equivalences.  */
4676
4677  if (op0_elt == 0)
4678    {
4679      if (insert_regs (op0, NULL, 0))
4680	{
4681	  rehash_using_reg (op0);
4682	  op0_hash = HASH (op0, mode);
4683	}
4684
4685      op0_elt = insert (op0, NULL, op0_hash, mode);
4686      op0_elt->in_memory = op0_in_memory;
4687    }
4688
4689  if (op1_elt == 0)
4690    {
4691      if (insert_regs (op1, NULL, 0))
4692	{
4693	  rehash_using_reg (op1);
4694	  op1_hash = HASH (op1, mode);
4695	}
4696
4697      op1_elt = insert (op1, NULL, op1_hash, mode);
4698      op1_elt->in_memory = op1_in_memory;
4699    }
4700
4701  merge_equiv_classes (op0_elt, op1_elt);
4702  last_jump_equiv_class = op0_elt;
4703}
4704
4705/* CSE processing for one instruction.
4706   First simplify sources and addresses of all assignments
4707   in the instruction, using previously-computed equivalents values.
4708   Then install the new sources and destinations in the table
4709   of available values.
4710
4711   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4712   the insn.  It means that INSN is inside libcall block.  In this
4713   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4714
4715/* Data on one SET contained in the instruction.  */
4716
4717struct set
4718{
4719  /* The SET rtx itself.  */
4720  rtx rtl;
4721  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4722  rtx src;
4723  /* The hash-table element for the SET_SRC of the SET.  */
4724  struct table_elt *src_elt;
4725  /* Hash value for the SET_SRC.  */
4726  unsigned src_hash;
4727  /* Hash value for the SET_DEST.  */
4728  unsigned dest_hash;
4729  /* The SET_DEST, with SUBREG, etc., stripped.  */
4730  rtx inner_dest;
4731  /* Nonzero if the SET_SRC is in memory.  */
4732  char src_in_memory;
4733  /* Nonzero if the SET_SRC contains something
4734     whose value cannot be predicted and understood.  */
4735  char src_volatile;
4736  /* Original machine mode, in case it becomes a CONST_INT.  */
4737  enum machine_mode mode;
4738  /* A constant equivalent for SET_SRC, if any.  */
4739  rtx src_const;
4740  /* Original SET_SRC value used for libcall notes.  */
4741  rtx orig_src;
4742  /* Hash value of constant equivalent for SET_SRC.  */
4743  unsigned src_const_hash;
4744  /* Table entry for constant equivalent for SET_SRC, if any.  */
4745  struct table_elt *src_const_elt;
4746};
4747
4748static void
4749cse_insn (insn, libcall_insn)
4750     rtx insn;
4751     rtx libcall_insn;
4752{
4753  rtx x = PATTERN (insn);
4754  int i;
4755  rtx tem;
4756  int n_sets = 0;
4757
4758#ifdef HAVE_cc0
4759  /* Records what this insn does to set CC0.  */
4760  rtx this_insn_cc0 = 0;
4761  enum machine_mode this_insn_cc0_mode = VOIDmode;
4762#endif
4763
4764  rtx src_eqv = 0;
4765  struct table_elt *src_eqv_elt = 0;
4766  int src_eqv_volatile = 0;
4767  int src_eqv_in_memory = 0;
4768  unsigned src_eqv_hash = 0;
4769
4770  struct set *sets = (struct set *) 0;
4771
4772  this_insn = insn;
4773
4774  /* Find all the SETs and CLOBBERs in this instruction.
4775     Record all the SETs in the array `set' and count them.
4776     Also determine whether there is a CLOBBER that invalidates
4777     all memory references, or all references at varying addresses.  */
4778
4779  if (GET_CODE (insn) == CALL_INSN)
4780    {
4781      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4782	{
4783	  if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4784	    invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4785	  XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4786	}
4787    }
4788
4789  if (GET_CODE (x) == SET)
4790    {
4791      sets = (struct set *) alloca (sizeof (struct set));
4792      sets[0].rtl = x;
4793
4794      /* Ignore SETs that are unconditional jumps.
4795	 They never need cse processing, so this does not hurt.
4796	 The reason is not efficiency but rather
4797	 so that we can test at the end for instructions
4798	 that have been simplified to unconditional jumps
4799	 and not be misled by unchanged instructions
4800	 that were unconditional jumps to begin with.  */
4801      if (SET_DEST (x) == pc_rtx
4802	  && GET_CODE (SET_SRC (x)) == LABEL_REF)
4803	;
4804
4805      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4806	 The hard function value register is used only once, to copy to
4807	 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4808	 Ensure we invalidate the destination register.  On the 80386 no
4809	 other code would invalidate it since it is a fixed_reg.
4810	 We need not check the return of apply_change_group; see canon_reg.  */
4811
4812      else if (GET_CODE (SET_SRC (x)) == CALL)
4813	{
4814	  canon_reg (SET_SRC (x), insn);
4815	  apply_change_group ();
4816	  fold_rtx (SET_SRC (x), insn);
4817	  invalidate (SET_DEST (x), VOIDmode);
4818	}
4819      else
4820	n_sets = 1;
4821    }
4822  else if (GET_CODE (x) == PARALLEL)
4823    {
4824      int lim = XVECLEN (x, 0);
4825
4826      sets = (struct set *) alloca (lim * sizeof (struct set));
4827
4828      /* Find all regs explicitly clobbered in this insn,
4829	 and ensure they are not replaced with any other regs
4830	 elsewhere in this insn.
4831	 When a reg that is clobbered is also used for input,
4832	 we should presume that that is for a reason,
4833	 and we should not substitute some other register
4834	 which is not supposed to be clobbered.
4835	 Therefore, this loop cannot be merged into the one below
4836	 because a CALL may precede a CLOBBER and refer to the
4837	 value clobbered.  We must not let a canonicalization do
4838	 anything in that case.  */
4839      for (i = 0; i < lim; i++)
4840	{
4841	  rtx y = XVECEXP (x, 0, i);
4842	  if (GET_CODE (y) == CLOBBER)
4843	    {
4844	      rtx clobbered = XEXP (y, 0);
4845
4846	      if (GET_CODE (clobbered) == REG
4847		  || GET_CODE (clobbered) == SUBREG)
4848		invalidate (clobbered, VOIDmode);
4849	      else if (GET_CODE (clobbered) == STRICT_LOW_PART
4850		       || GET_CODE (clobbered) == ZERO_EXTRACT)
4851		invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4852	    }
4853	}
4854
4855      for (i = 0; i < lim; i++)
4856	{
4857	  rtx y = XVECEXP (x, 0, i);
4858	  if (GET_CODE (y) == SET)
4859	    {
4860	      /* As above, we ignore unconditional jumps and call-insns and
4861		 ignore the result of apply_change_group.  */
4862	      if (GET_CODE (SET_SRC (y)) == CALL)
4863		{
4864		  canon_reg (SET_SRC (y), insn);
4865		  apply_change_group ();
4866		  fold_rtx (SET_SRC (y), insn);
4867		  invalidate (SET_DEST (y), VOIDmode);
4868		}
4869	      else if (SET_DEST (y) == pc_rtx
4870		       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4871		;
4872	      else
4873		sets[n_sets++].rtl = y;
4874	    }
4875	  else if (GET_CODE (y) == CLOBBER)
4876	    {
4877	      /* If we clobber memory, canon the address.
4878		 This does nothing when a register is clobbered
4879		 because we have already invalidated the reg.  */
4880	      if (GET_CODE (XEXP (y, 0)) == MEM)
4881		canon_reg (XEXP (y, 0), NULL_RTX);
4882	    }
4883	  else if (GET_CODE (y) == USE
4884		   && ! (GET_CODE (XEXP (y, 0)) == REG
4885			 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4886	    canon_reg (y, NULL_RTX);
4887	  else if (GET_CODE (y) == CALL)
4888	    {
4889	      /* The result of apply_change_group can be ignored; see
4890		 canon_reg.  */
4891	      canon_reg (y, insn);
4892	      apply_change_group ();
4893	      fold_rtx (y, insn);
4894	    }
4895	}
4896    }
4897  else if (GET_CODE (x) == CLOBBER)
4898    {
4899      if (GET_CODE (XEXP (x, 0)) == MEM)
4900	canon_reg (XEXP (x, 0), NULL_RTX);
4901    }
4902
4903  /* Canonicalize a USE of a pseudo register or memory location.  */
4904  else if (GET_CODE (x) == USE
4905	   && ! (GET_CODE (XEXP (x, 0)) == REG
4906		 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4907    canon_reg (XEXP (x, 0), NULL_RTX);
4908  else if (GET_CODE (x) == CALL)
4909    {
4910      /* The result of apply_change_group can be ignored; see canon_reg.  */
4911      canon_reg (x, insn);
4912      apply_change_group ();
4913      fold_rtx (x, insn);
4914    }
4915
4916  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4917     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
4918     is handled specially for this case, and if it isn't set, then there will
4919     be no equivalence for the destination.  */
4920  if (n_sets == 1 && REG_NOTES (insn) != 0
4921      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4922      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4923	  || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4924    {
4925      src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4926      XEXP (tem, 0) = src_eqv;
4927    }
4928
4929  /* Canonicalize sources and addresses of destinations.
4930     We do this in a separate pass to avoid problems when a MATCH_DUP is
4931     present in the insn pattern.  In that case, we want to ensure that
4932     we don't break the duplicate nature of the pattern.  So we will replace
4933     both operands at the same time.  Otherwise, we would fail to find an
4934     equivalent substitution in the loop calling validate_change below.
4935
4936     We used to suppress canonicalization of DEST if it appears in SRC,
4937     but we don't do this any more.  */
4938
4939  for (i = 0; i < n_sets; i++)
4940    {
4941      rtx dest = SET_DEST (sets[i].rtl);
4942      rtx src = SET_SRC (sets[i].rtl);
4943      rtx new = canon_reg (src, insn);
4944      int insn_code;
4945
4946      sets[i].orig_src = src;
4947      if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4948	   && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4949	       != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4950	  || (insn_code = recog_memoized (insn)) < 0
4951	  || insn_data[insn_code].n_dups > 0)
4952	validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4953      else
4954	SET_SRC (sets[i].rtl) = new;
4955
4956      if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4957	{
4958	  validate_change (insn, &XEXP (dest, 1),
4959			   canon_reg (XEXP (dest, 1), insn), 1);
4960	  validate_change (insn, &XEXP (dest, 2),
4961			   canon_reg (XEXP (dest, 2), insn), 1);
4962	}
4963
4964      while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4965	     || GET_CODE (dest) == ZERO_EXTRACT
4966	     || GET_CODE (dest) == SIGN_EXTRACT)
4967	dest = XEXP (dest, 0);
4968
4969      if (GET_CODE (dest) == MEM)
4970	canon_reg (dest, insn);
4971    }
4972
4973  /* Now that we have done all the replacements, we can apply the change
4974     group and see if they all work.  Note that this will cause some
4975     canonicalizations that would have worked individually not to be applied
4976     because some other canonicalization didn't work, but this should not
4977     occur often.
4978
4979     The result of apply_change_group can be ignored; see canon_reg.  */
4980
4981  apply_change_group ();
4982
4983  /* Set sets[i].src_elt to the class each source belongs to.
4984     Detect assignments from or to volatile things
4985     and set set[i] to zero so they will be ignored
4986     in the rest of this function.
4987
4988     Nothing in this loop changes the hash table or the register chains.  */
4989
4990  for (i = 0; i < n_sets; i++)
4991    {
4992      rtx src, dest;
4993      rtx src_folded;
4994      struct table_elt *elt = 0, *p;
4995      enum machine_mode mode;
4996      rtx src_eqv_here;
4997      rtx src_const = 0;
4998      rtx src_related = 0;
4999      struct table_elt *src_const_elt = 0;
5000      int src_cost = MAX_COST;
5001      int src_eqv_cost = MAX_COST;
5002      int src_folded_cost = MAX_COST;
5003      int src_related_cost = MAX_COST;
5004      int src_elt_cost = MAX_COST;
5005      int src_regcost = MAX_COST;
5006      int src_eqv_regcost = MAX_COST;
5007      int src_folded_regcost = MAX_COST;
5008      int src_related_regcost = MAX_COST;
5009      int src_elt_regcost = MAX_COST;
5010      /* Set non-zero if we need to call force_const_mem on with the
5011	 contents of src_folded before using it.  */
5012      int src_folded_force_flag = 0;
5013
5014      dest = SET_DEST (sets[i].rtl);
5015      src = SET_SRC (sets[i].rtl);
5016
5017      /* If SRC is a constant that has no machine mode,
5018	 hash it with the destination's machine mode.
5019	 This way we can keep different modes separate.  */
5020
5021      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5022      sets[i].mode = mode;
5023
5024      if (src_eqv)
5025	{
5026	  enum machine_mode eqvmode = mode;
5027	  if (GET_CODE (dest) == STRICT_LOW_PART)
5028	    eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5029	  do_not_record = 0;
5030	  hash_arg_in_memory = 0;
5031	  src_eqv_hash = HASH (src_eqv, eqvmode);
5032
5033	  /* Find the equivalence class for the equivalent expression.  */
5034
5035	  if (!do_not_record)
5036	    src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5037
5038	  src_eqv_volatile = do_not_record;
5039	  src_eqv_in_memory = hash_arg_in_memory;
5040	}
5041
5042      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5043	 value of the INNER register, not the destination.  So it is not
5044	 a valid substitution for the source.  But save it for later.  */
5045      if (GET_CODE (dest) == STRICT_LOW_PART)
5046	src_eqv_here = 0;
5047      else
5048	src_eqv_here = src_eqv;
5049
5050      /* Simplify and foldable subexpressions in SRC.  Then get the fully-
5051	 simplified result, which may not necessarily be valid.  */
5052      src_folded = fold_rtx (src, insn);
5053
5054#if 0
5055      /* ??? This caused bad code to be generated for the m68k port with -O2.
5056	 Suppose src is (CONST_INT -1), and that after truncation src_folded
5057	 is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5058	 At the end we will add src and src_const to the same equivalence
5059	 class.  We now have 3 and -1 on the same equivalence class.  This
5060	 causes later instructions to be mis-optimized.  */
5061      /* If storing a constant in a bitfield, pre-truncate the constant
5062	 so we will be able to record it later.  */
5063      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5064	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5065	{
5066	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5067
5068	  if (GET_CODE (src) == CONST_INT
5069	      && GET_CODE (width) == CONST_INT
5070	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5071	      && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5072	    src_folded
5073	      = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5074					  << INTVAL (width)) - 1));
5075	}
5076#endif
5077
5078      /* Compute SRC's hash code, and also notice if it
5079	 should not be recorded at all.  In that case,
5080	 prevent any further processing of this assignment.  */
5081      do_not_record = 0;
5082      hash_arg_in_memory = 0;
5083
5084      sets[i].src = src;
5085      sets[i].src_hash = HASH (src, mode);
5086      sets[i].src_volatile = do_not_record;
5087      sets[i].src_in_memory = hash_arg_in_memory;
5088
5089      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5090	 a pseudo, do not record SRC.  Using SRC as a replacement for
5091	 anything else will be incorrect in that situation.  Note that
5092	 this usually occurs only for stack slots, in which case all the
5093	 RTL would be referring to SRC, so we don't lose any optimization
5094	 opportunities by not having SRC in the hash table.  */
5095
5096      if (GET_CODE (src) == MEM
5097	  && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5098	  && GET_CODE (dest) == REG
5099	  && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5100	sets[i].src_volatile = 1;
5101
5102#if 0
5103      /* It is no longer clear why we used to do this, but it doesn't
5104	 appear to still be needed.  So let's try without it since this
5105	 code hurts cse'ing widened ops.  */
5106      /* If source is a perverse subreg (such as QI treated as an SI),
5107	 treat it as volatile.  It may do the work of an SI in one context
5108	 where the extra bits are not being used, but cannot replace an SI
5109	 in general.  */
5110      if (GET_CODE (src) == SUBREG
5111	  && (GET_MODE_SIZE (GET_MODE (src))
5112	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5113	sets[i].src_volatile = 1;
5114#endif
5115
5116      /* Locate all possible equivalent forms for SRC.  Try to replace
5117         SRC in the insn with each cheaper equivalent.
5118
5119         We have the following types of equivalents: SRC itself, a folded
5120         version, a value given in a REG_EQUAL note, or a value related
5121	 to a constant.
5122
5123         Each of these equivalents may be part of an additional class
5124         of equivalents (if more than one is in the table, they must be in
5125         the same class; we check for this).
5126
5127	 If the source is volatile, we don't do any table lookups.
5128
5129         We note any constant equivalent for possible later use in a
5130         REG_NOTE.  */
5131
5132      if (!sets[i].src_volatile)
5133	elt = lookup (src, sets[i].src_hash, mode);
5134
5135      sets[i].src_elt = elt;
5136
5137      if (elt && src_eqv_here && src_eqv_elt)
5138	{
5139	  if (elt->first_same_value != src_eqv_elt->first_same_value)
5140	    {
5141	      /* The REG_EQUAL is indicating that two formerly distinct
5142		 classes are now equivalent.  So merge them.  */
5143	      merge_equiv_classes (elt, src_eqv_elt);
5144	      src_eqv_hash = HASH (src_eqv, elt->mode);
5145	      src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5146	    }
5147
5148	  src_eqv_here = 0;
5149	}
5150
5151      else if (src_eqv_elt)
5152	elt = src_eqv_elt;
5153
5154      /* Try to find a constant somewhere and record it in `src_const'.
5155	 Record its table element, if any, in `src_const_elt'.  Look in
5156	 any known equivalences first.  (If the constant is not in the
5157	 table, also set `sets[i].src_const_hash').  */
5158      if (elt)
5159	for (p = elt->first_same_value; p; p = p->next_same_value)
5160	  if (p->is_const)
5161	    {
5162	      src_const = p->exp;
5163	      src_const_elt = elt;
5164	      break;
5165	    }
5166
5167      if (src_const == 0
5168	  && (CONSTANT_P (src_folded)
5169	      /* Consider (minus (label_ref L1) (label_ref L2)) as
5170		 "constant" here so we will record it. This allows us
5171		 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5172	      || (GET_CODE (src_folded) == MINUS
5173		  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5174		  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5175	src_const = src_folded, src_const_elt = elt;
5176      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5177	src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5178
5179      /* If we don't know if the constant is in the table, get its
5180	 hash code and look it up.  */
5181      if (src_const && src_const_elt == 0)
5182	{
5183	  sets[i].src_const_hash = HASH (src_const, mode);
5184	  src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5185	}
5186
5187      sets[i].src_const = src_const;
5188      sets[i].src_const_elt = src_const_elt;
5189
5190      /* If the constant and our source are both in the table, mark them as
5191	 equivalent.  Otherwise, if a constant is in the table but the source
5192	 isn't, set ELT to it.  */
5193      if (src_const_elt && elt
5194	  && src_const_elt->first_same_value != elt->first_same_value)
5195	merge_equiv_classes (elt, src_const_elt);
5196      else if (src_const_elt && elt == 0)
5197	elt = src_const_elt;
5198
5199      /* See if there is a register linearly related to a constant
5200         equivalent of SRC.  */
5201      if (src_const
5202	  && (GET_CODE (src_const) == CONST
5203	      || (src_const_elt && src_const_elt->related_value != 0)))
5204	{
5205	  src_related = use_related_value (src_const, src_const_elt);
5206	  if (src_related)
5207	    {
5208	      struct table_elt *src_related_elt
5209		= lookup (src_related, HASH (src_related, mode), mode);
5210	      if (src_related_elt && elt)
5211		{
5212		  if (elt->first_same_value
5213		      != src_related_elt->first_same_value)
5214		    /* This can occur when we previously saw a CONST
5215		       involving a SYMBOL_REF and then see the SYMBOL_REF
5216		       twice.  Merge the involved classes.  */
5217		    merge_equiv_classes (elt, src_related_elt);
5218
5219		  src_related = 0;
5220		  src_related_elt = 0;
5221		}
5222	      else if (src_related_elt && elt == 0)
5223		elt = src_related_elt;
5224	    }
5225	}
5226
5227      /* See if we have a CONST_INT that is already in a register in a
5228	 wider mode.  */
5229
5230      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5231	  && GET_MODE_CLASS (mode) == MODE_INT
5232	  && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5233	{
5234	  enum machine_mode wider_mode;
5235
5236	  for (wider_mode = GET_MODE_WIDER_MODE (mode);
5237	       GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5238	       && src_related == 0;
5239	       wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5240	    {
5241	      struct table_elt *const_elt
5242		= lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5243
5244	      if (const_elt == 0)
5245		continue;
5246
5247	      for (const_elt = const_elt->first_same_value;
5248		   const_elt; const_elt = const_elt->next_same_value)
5249		if (GET_CODE (const_elt->exp) == REG)
5250		  {
5251		    src_related = gen_lowpart_if_possible (mode,
5252							   const_elt->exp);
5253		    break;
5254		  }
5255	    }
5256	}
5257
5258      /* Another possibility is that we have an AND with a constant in
5259	 a mode narrower than a word.  If so, it might have been generated
5260	 as part of an "if" which would narrow the AND.  If we already
5261	 have done the AND in a wider mode, we can use a SUBREG of that
5262	 value.  */
5263
5264      if (flag_expensive_optimizations && ! src_related
5265	  && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5266	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5267	{
5268	  enum machine_mode tmode;
5269	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5270
5271	  for (tmode = GET_MODE_WIDER_MODE (mode);
5272	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5273	       tmode = GET_MODE_WIDER_MODE (tmode))
5274	    {
5275	      rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5276	      struct table_elt *larger_elt;
5277
5278	      if (inner)
5279		{
5280		  PUT_MODE (new_and, tmode);
5281		  XEXP (new_and, 0) = inner;
5282		  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5283		  if (larger_elt == 0)
5284		    continue;
5285
5286		  for (larger_elt = larger_elt->first_same_value;
5287		       larger_elt; larger_elt = larger_elt->next_same_value)
5288		    if (GET_CODE (larger_elt->exp) == REG)
5289		      {
5290			src_related
5291			  = gen_lowpart_if_possible (mode, larger_elt->exp);
5292			break;
5293		      }
5294
5295		  if (src_related)
5296		    break;
5297		}
5298	    }
5299	}
5300
5301#ifdef LOAD_EXTEND_OP
5302      /* See if a MEM has already been loaded with a widening operation;
5303	 if it has, we can use a subreg of that.  Many CISC machines
5304	 also have such operations, but this is only likely to be
5305	 beneficial these machines.  */
5306
5307      if (flag_expensive_optimizations && src_related == 0
5308	  && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5309	  && GET_MODE_CLASS (mode) == MODE_INT
5310	  && GET_CODE (src) == MEM && ! do_not_record
5311	  && LOAD_EXTEND_OP (mode) != NIL)
5312	{
5313	  enum machine_mode tmode;
5314
5315	  /* Set what we are trying to extend and the operation it might
5316	     have been extended with.  */
5317	  PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5318	  XEXP (memory_extend_rtx, 0) = src;
5319
5320	  for (tmode = GET_MODE_WIDER_MODE (mode);
5321	       GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5322	       tmode = GET_MODE_WIDER_MODE (tmode))
5323	    {
5324	      struct table_elt *larger_elt;
5325
5326	      PUT_MODE (memory_extend_rtx, tmode);
5327	      larger_elt = lookup (memory_extend_rtx,
5328				   HASH (memory_extend_rtx, tmode), tmode);
5329	      if (larger_elt == 0)
5330		continue;
5331
5332	      for (larger_elt = larger_elt->first_same_value;
5333		   larger_elt; larger_elt = larger_elt->next_same_value)
5334		if (GET_CODE (larger_elt->exp) == REG)
5335		  {
5336		    src_related = gen_lowpart_if_possible (mode,
5337							   larger_elt->exp);
5338		    break;
5339		  }
5340
5341	      if (src_related)
5342		break;
5343	    }
5344	}
5345#endif /* LOAD_EXTEND_OP */
5346
5347      if (src == src_folded)
5348	src_folded = 0;
5349
5350      /* At this point, ELT, if non-zero, points to a class of expressions
5351         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5352	 and SRC_RELATED, if non-zero, each contain additional equivalent
5353	 expressions.  Prune these latter expressions by deleting expressions
5354	 already in the equivalence class.
5355
5356	 Check for an equivalent identical to the destination.  If found,
5357	 this is the preferred equivalent since it will likely lead to
5358	 elimination of the insn.  Indicate this by placing it in
5359	 `src_related'.  */
5360
5361      if (elt)
5362	elt = elt->first_same_value;
5363      for (p = elt; p; p = p->next_same_value)
5364	{
5365	  enum rtx_code code = GET_CODE (p->exp);
5366
5367	  /* If the expression is not valid, ignore it.  Then we do not
5368	     have to check for validity below.  In most cases, we can use
5369	     `rtx_equal_p', since canonicalization has already been done.  */
5370	  if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5371	    continue;
5372
5373	  /* Also skip paradoxical subregs, unless that's what we're
5374	     looking for.  */
5375	  if (code == SUBREG
5376	      && (GET_MODE_SIZE (GET_MODE (p->exp))
5377		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5378	      && ! (src != 0
5379		    && GET_CODE (src) == SUBREG
5380		    && GET_MODE (src) == GET_MODE (p->exp)
5381		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5382			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5383	    continue;
5384
5385	  if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5386	    src = 0;
5387	  else if (src_folded && GET_CODE (src_folded) == code
5388		   && rtx_equal_p (src_folded, p->exp))
5389	    src_folded = 0;
5390	  else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5391		   && rtx_equal_p (src_eqv_here, p->exp))
5392	    src_eqv_here = 0;
5393	  else if (src_related && GET_CODE (src_related) == code
5394		   && rtx_equal_p (src_related, p->exp))
5395	    src_related = 0;
5396
5397	  /* This is the same as the destination of the insns, we want
5398	     to prefer it.  Copy it to src_related.  The code below will
5399	     then give it a negative cost.  */
5400	  if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5401	    src_related = dest;
5402	}
5403
5404      /* Find the cheapest valid equivalent, trying all the available
5405         possibilities.  Prefer items not in the hash table to ones
5406         that are when they are equal cost.  Note that we can never
5407         worsen an insn as the current contents will also succeed.
5408	 If we find an equivalent identical to the destination, use it as best,
5409	 since this insn will probably be eliminated in that case.  */
5410      if (src)
5411	{
5412	  if (rtx_equal_p (src, dest))
5413	    src_cost = src_regcost = -1;
5414	  else
5415	    {
5416	      src_cost = COST (src);
5417	      src_regcost = approx_reg_cost (src);
5418	    }
5419	}
5420
5421      if (src_eqv_here)
5422	{
5423	  if (rtx_equal_p (src_eqv_here, dest))
5424	    src_eqv_cost = src_eqv_regcost = -1;
5425	  else
5426	    {
5427	      src_eqv_cost = COST (src_eqv_here);
5428	      src_eqv_regcost = approx_reg_cost (src_eqv_here);
5429	    }
5430	}
5431
5432      if (src_folded)
5433	{
5434	  if (rtx_equal_p (src_folded, dest))
5435	    src_folded_cost = src_folded_regcost = -1;
5436	  else
5437	    {
5438	      src_folded_cost = COST (src_folded);
5439	      src_folded_regcost = approx_reg_cost (src_folded);
5440	    }
5441	}
5442
5443      if (src_related)
5444	{
5445	  if (rtx_equal_p (src_related, dest))
5446	    src_related_cost = src_related_regcost = -1;
5447	  else
5448	    {
5449	      src_related_cost = COST (src_related);
5450	      src_related_regcost = approx_reg_cost (src_related);
5451	    }
5452	}
5453
5454      /* If this was an indirect jump insn, a known label will really be
5455	 cheaper even though it looks more expensive.  */
5456      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5457	src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5458
5459      /* Terminate loop when replacement made.  This must terminate since
5460         the current contents will be tested and will always be valid.  */
5461      while (1)
5462	{
5463	  rtx trial;
5464
5465	  /* Skip invalid entries.  */
5466	  while (elt && GET_CODE (elt->exp) != REG
5467		 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5468	    elt = elt->next_same_value;
5469
5470	  /* A paradoxical subreg would be bad here: it'll be the right
5471	     size, but later may be adjusted so that the upper bits aren't
5472	     what we want.  So reject it.  */
5473	  if (elt != 0
5474	      && GET_CODE (elt->exp) == SUBREG
5475	      && (GET_MODE_SIZE (GET_MODE (elt->exp))
5476		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5477	      /* It is okay, though, if the rtx we're trying to match
5478		 will ignore any of the bits we can't predict.  */
5479	      && ! (src != 0
5480		    && GET_CODE (src) == SUBREG
5481		    && GET_MODE (src) == GET_MODE (elt->exp)
5482		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5483			< GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5484	    {
5485	      elt = elt->next_same_value;
5486	      continue;
5487	    }
5488
5489          if (elt)
5490	    {
5491	      src_elt_cost = elt->cost;
5492	      src_elt_regcost = elt->regcost;
5493	    }
5494
5495          /* Find cheapest and skip it for the next time.   For items
5496	     of equal cost, use this order:
5497	     src_folded, src, src_eqv, src_related and hash table entry.  */
5498	  if (src_folded
5499	      && preferrable (src_folded_cost, src_folded_regcost,
5500			      src_cost, src_regcost) <= 0
5501	      && preferrable (src_folded_cost, src_folded_regcost,
5502			      src_eqv_cost, src_eqv_regcost) <= 0
5503	      && preferrable (src_folded_cost, src_folded_regcost,
5504			      src_related_cost, src_related_regcost) <= 0
5505	      && preferrable (src_folded_cost, src_folded_regcost,
5506			      src_elt_cost, src_elt_regcost) <= 0)
5507	    {
5508	      trial = src_folded, src_folded_cost = MAX_COST;
5509	      if (src_folded_force_flag)
5510		trial = force_const_mem (mode, trial);
5511	    }
5512	  else if (src
5513		   && preferrable (src_cost, src_regcost,
5514				   src_eqv_cost, src_eqv_regcost) <= 0
5515		   && preferrable (src_cost, src_regcost,
5516				   src_related_cost, src_related_regcost) <= 0
5517		   && preferrable (src_cost, src_regcost,
5518				   src_elt_cost, src_elt_regcost) <= 0)
5519	    trial = src, src_cost = MAX_COST;
5520	  else if (src_eqv_here
5521		   && preferrable (src_eqv_cost, src_eqv_regcost,
5522				   src_related_cost, src_related_regcost) <= 0
5523		   && preferrable (src_eqv_cost, src_eqv_regcost,
5524				   src_elt_cost, src_elt_regcost) <= 0)
5525	    trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5526	  else if (src_related
5527		   && preferrable (src_related_cost, src_related_regcost,
5528				   src_elt_cost, src_elt_regcost) <= 0)
5529  	    trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5530	  else
5531	    {
5532	      trial = copy_rtx (elt->exp);
5533	      elt = elt->next_same_value;
5534	      src_elt_cost = MAX_COST;
5535	    }
5536
5537	  /* We don't normally have an insn matching (set (pc) (pc)), so
5538	     check for this separately here.  We will delete such an
5539	     insn below.
5540
5541	     For other cases such as a table jump or conditional jump
5542	     where we know the ultimate target, go ahead and replace the
5543	     operand.  While that may not make a valid insn, we will
5544	     reemit the jump below (and also insert any necessary
5545	     barriers).  */
5546	  if (n_sets == 1 && dest == pc_rtx
5547	      && (trial == pc_rtx
5548		  || (GET_CODE (trial) == LABEL_REF
5549		      && ! condjump_p (insn))))
5550	    {
5551	      SET_SRC (sets[i].rtl) = trial;
5552	      cse_jumps_altered = 1;
5553	      break;
5554	    }
5555
5556	  /* Look for a substitution that makes a valid insn.  */
5557	  else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5558	    {
5559	      /* If we just made a substitution inside a libcall, then we
5560		 need to make the same substitution in any notes attached
5561		 to the RETVAL insn.  */
5562	      if (libcall_insn
5563		  && (GET_CODE (sets[i].orig_src) == REG
5564		      || GET_CODE (sets[i].orig_src) == SUBREG
5565		      || GET_CODE (sets[i].orig_src) == MEM))
5566		replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5567			     canon_reg (SET_SRC (sets[i].rtl), insn));
5568
5569	      /* The result of apply_change_group can be ignored; see
5570		 canon_reg.  */
5571
5572	      validate_change (insn, &SET_SRC (sets[i].rtl),
5573			       canon_reg (SET_SRC (sets[i].rtl), insn),
5574			       1);
5575	      apply_change_group ();
5576	      break;
5577	    }
5578
5579	  /* If we previously found constant pool entries for
5580	     constants and this is a constant, try making a
5581	     pool entry.  Put it in src_folded unless we already have done
5582	     this since that is where it likely came from.  */
5583
5584	  else if (constant_pool_entries_cost
5585		   && CONSTANT_P (trial)
5586		   /* Reject cases that will abort in decode_rtx_const.
5587		      On the alpha when simplifying a switch, we get
5588		      (const (truncate (minus (label_ref) (label_ref)))).  */
5589		   && ! (GET_CODE (trial) == CONST
5590			 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5591		   /* Likewise on IA-64, except without the truncate.  */
5592		   && ! (GET_CODE (trial) == CONST
5593			 && GET_CODE (XEXP (trial, 0)) == MINUS
5594			 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5595			 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5596		   && (src_folded == 0
5597		       || (GET_CODE (src_folded) != MEM
5598			   && ! src_folded_force_flag))
5599		   && GET_MODE_CLASS (mode) != MODE_CC
5600		   && mode != VOIDmode)
5601	    {
5602	      src_folded_force_flag = 1;
5603	      src_folded = trial;
5604	      src_folded_cost = constant_pool_entries_cost;
5605	    }
5606	}
5607
5608      src = SET_SRC (sets[i].rtl);
5609
5610      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5611	 However, there is an important exception:  If both are registers
5612	 that are not the head of their equivalence class, replace SET_SRC
5613	 with the head of the class.  If we do not do this, we will have
5614	 both registers live over a portion of the basic block.  This way,
5615	 their lifetimes will likely abut instead of overlapping.  */
5616      if (GET_CODE (dest) == REG
5617	  && REGNO_QTY_VALID_P (REGNO (dest)))
5618	{
5619	  int dest_q = REG_QTY (REGNO (dest));
5620	  struct qty_table_elem *dest_ent = &qty_table[dest_q];
5621
5622	  if (dest_ent->mode == GET_MODE (dest)
5623	      && dest_ent->first_reg != REGNO (dest)
5624	      && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5625	      /* Don't do this if the original insn had a hard reg as
5626		 SET_SRC or SET_DEST.  */
5627	      && (GET_CODE (sets[i].src) != REG
5628		  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5629	      && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5630	    /* We can't call canon_reg here because it won't do anything if
5631	       SRC is a hard register.  */
5632	    {
5633	      int src_q = REG_QTY (REGNO (src));
5634	      struct qty_table_elem *src_ent = &qty_table[src_q];
5635	      int first = src_ent->first_reg;
5636	      rtx new_src
5637		= (first >= FIRST_PSEUDO_REGISTER
5638		   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5639
5640	      /* We must use validate-change even for this, because this
5641		 might be a special no-op instruction, suitable only to
5642		 tag notes onto.  */
5643	      if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5644		{
5645		  src = new_src;
5646		  /* If we had a constant that is cheaper than what we are now
5647		     setting SRC to, use that constant.  We ignored it when we
5648		     thought we could make this into a no-op.  */
5649		  if (src_const && COST (src_const) < COST (src)
5650		      && validate_change (insn, &SET_SRC (sets[i].rtl),
5651					  src_const, 0))
5652		    src = src_const;
5653		}
5654	    }
5655	}
5656
5657      /* If we made a change, recompute SRC values.  */
5658      if (src != sets[i].src)
5659	{
5660	  cse_altered = 1;
5661	  do_not_record = 0;
5662	  hash_arg_in_memory = 0;
5663	  sets[i].src = src;
5664	  sets[i].src_hash = HASH (src, mode);
5665	  sets[i].src_volatile = do_not_record;
5666	  sets[i].src_in_memory = hash_arg_in_memory;
5667	  sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5668	}
5669
5670      /* If this is a single SET, we are setting a register, and we have an
5671	 equivalent constant, we want to add a REG_NOTE.   We don't want
5672	 to write a REG_EQUAL note for a constant pseudo since verifying that
5673	 that pseudo hasn't been eliminated is a pain.  Such a note also
5674	 won't help anything.
5675
5676	 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5677	 which can be created for a reference to a compile time computable
5678	 entry in a jump table.  */
5679
5680      if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5681	  && GET_CODE (src_const) != REG
5682	  && ! (GET_CODE (src_const) == CONST
5683		&& GET_CODE (XEXP (src_const, 0)) == MINUS
5684		&& GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5685		&& GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5686	{
5687	  /* Make sure that the rtx is not shared with any other insn.  */
5688	  src_const = copy_rtx (src_const);
5689
5690	  /* Record the actual constant value in a REG_EQUAL note, making
5691	     a new one if one does not already exist.  */
5692	  set_unique_reg_note (insn, REG_EQUAL, src_const);
5693
5694          /* If storing a constant value in a register that
5695	     previously held the constant value 0,
5696	     record this fact with a REG_WAS_0 note on this insn.
5697
5698	     Note that the *register* is required to have previously held 0,
5699	     not just any register in the quantity and we must point to the
5700	     insn that set that register to zero.
5701
5702	     Rather than track each register individually, we just see if
5703	     the last set for this quantity was for this register.  */
5704
5705	  if (REGNO_QTY_VALID_P (REGNO (dest)))
5706	    {
5707	      int dest_q = REG_QTY (REGNO (dest));
5708	      struct qty_table_elem *dest_ent = &qty_table[dest_q];
5709
5710	      if (dest_ent->const_rtx == const0_rtx)
5711		{
5712		  /* See if we previously had a REG_WAS_0 note.  */
5713		  rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5714		  rtx const_insn = dest_ent->const_insn;
5715
5716		  if ((tem = single_set (const_insn)) != 0
5717		      && rtx_equal_p (SET_DEST (tem), dest))
5718		    {
5719		      if (note)
5720			XEXP (note, 0) = const_insn;
5721		      else
5722			REG_NOTES (insn)
5723			  = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5724					       REG_NOTES (insn));
5725		    }
5726		}
5727	    }
5728	}
5729
5730      /* Now deal with the destination.  */
5731      do_not_record = 0;
5732
5733      /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5734	 to the MEM or REG within it.  */
5735      while (GET_CODE (dest) == SIGN_EXTRACT
5736	     || GET_CODE (dest) == ZERO_EXTRACT
5737	     || GET_CODE (dest) == SUBREG
5738	     || GET_CODE (dest) == STRICT_LOW_PART)
5739	dest = XEXP (dest, 0);
5740
5741      sets[i].inner_dest = dest;
5742
5743      if (GET_CODE (dest) == MEM)
5744	{
5745#ifdef PUSH_ROUNDING
5746	  /* Stack pushes invalidate the stack pointer.  */
5747	  rtx addr = XEXP (dest, 0);
5748	  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5749	      && XEXP (addr, 0) == stack_pointer_rtx)
5750	    invalidate (stack_pointer_rtx, Pmode);
5751#endif
5752	  dest = fold_rtx (dest, insn);
5753	}
5754
5755      /* Compute the hash code of the destination now,
5756	 before the effects of this instruction are recorded,
5757	 since the register values used in the address computation
5758	 are those before this instruction.  */
5759      sets[i].dest_hash = HASH (dest, mode);
5760
5761      /* Don't enter a bit-field in the hash table
5762	 because the value in it after the store
5763	 may not equal what was stored, due to truncation.  */
5764
5765      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5766	  || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5767	{
5768	  rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5769
5770	  if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5771	      && GET_CODE (width) == CONST_INT
5772	      && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5773	      && ! (INTVAL (src_const)
5774		    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5775	    /* Exception: if the value is constant,
5776	       and it won't be truncated, record it.  */
5777	    ;
5778	  else
5779	    {
5780	      /* This is chosen so that the destination will be invalidated
5781		 but no new value will be recorded.
5782		 We must invalidate because sometimes constant
5783		 values can be recorded for bitfields.  */
5784	      sets[i].src_elt = 0;
5785	      sets[i].src_volatile = 1;
5786	      src_eqv = 0;
5787	      src_eqv_elt = 0;
5788	    }
5789	}
5790
5791      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5792	 the insn.  */
5793      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5794	{
5795	  /* One less use of the label this insn used to jump to.  */
5796	  delete_insn (insn);
5797	  cse_jumps_altered = 1;
5798	  /* No more processing for this set.  */
5799	  sets[i].rtl = 0;
5800	}
5801
5802      /* If this SET is now setting PC to a label, we know it used to
5803	 be a conditional or computed branch.  */
5804      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5805	{
5806	  /* Now emit a BARRIER after the unconditional jump.  */
5807	  if (NEXT_INSN (insn) == 0
5808	      || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5809	    emit_barrier_after (insn);
5810
5811	  /* We reemit the jump in as many cases as possible just in
5812	     case the form of an unconditional jump is significantly
5813	     different than a computed jump or conditional jump.
5814
5815	     If this insn has multiple sets, then reemitting the
5816	     jump is nontrivial.  So instead we just force rerecognition
5817	     and hope for the best.  */
5818	  if (n_sets == 1)
5819	    {
5820	      rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5821
5822	      JUMP_LABEL (new) = XEXP (src, 0);
5823	      LABEL_NUSES (XEXP (src, 0))++;
5824	      insn = new;
5825
5826	      /* Now emit a BARRIER after the unconditional jump.  */
5827	      if (NEXT_INSN (insn) == 0
5828		  || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5829		emit_barrier_after (insn);
5830	    }
5831	  else
5832	    INSN_CODE (insn) = -1;
5833
5834	  never_reached_warning (insn, NULL);
5835
5836	  /* Do not bother deleting any unreachable code,
5837	     let jump/flow do that.  */
5838
5839	  cse_jumps_altered = 1;
5840	  sets[i].rtl = 0;
5841	}
5842
5843      /* If destination is volatile, invalidate it and then do no further
5844	 processing for this assignment.  */
5845
5846      else if (do_not_record)
5847	{
5848	  if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5849	    invalidate (dest, VOIDmode);
5850	  else if (GET_CODE (dest) == MEM)
5851	    {
5852	      /* Outgoing arguments for a libcall don't
5853		 affect any recorded expressions.  */
5854	      if (! libcall_insn || insn == libcall_insn)
5855		invalidate (dest, VOIDmode);
5856	    }
5857	  else if (GET_CODE (dest) == STRICT_LOW_PART
5858		   || GET_CODE (dest) == ZERO_EXTRACT)
5859	    invalidate (XEXP (dest, 0), GET_MODE (dest));
5860	  sets[i].rtl = 0;
5861	}
5862
5863      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5864	sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5865
5866#ifdef HAVE_cc0
5867      /* If setting CC0, record what it was set to, or a constant, if it
5868	 is equivalent to a constant.  If it is being set to a floating-point
5869	 value, make a COMPARE with the appropriate constant of 0.  If we
5870	 don't do this, later code can interpret this as a test against
5871	 const0_rtx, which can cause problems if we try to put it into an
5872	 insn as a floating-point operand.  */
5873      if (dest == cc0_rtx)
5874	{
5875	  this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5876	  this_insn_cc0_mode = mode;
5877	  if (FLOAT_MODE_P (mode))
5878	    this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5879					     CONST0_RTX (mode));
5880	}
5881#endif
5882    }
5883
5884  /* Now enter all non-volatile source expressions in the hash table
5885     if they are not already present.
5886     Record their equivalence classes in src_elt.
5887     This way we can insert the corresponding destinations into
5888     the same classes even if the actual sources are no longer in them
5889     (having been invalidated).  */
5890
5891  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5892      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5893    {
5894      struct table_elt *elt;
5895      struct table_elt *classp = sets[0].src_elt;
5896      rtx dest = SET_DEST (sets[0].rtl);
5897      enum machine_mode eqvmode = GET_MODE (dest);
5898
5899      if (GET_CODE (dest) == STRICT_LOW_PART)
5900	{
5901	  eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5902	  classp = 0;
5903	}
5904      if (insert_regs (src_eqv, classp, 0))
5905	{
5906	  rehash_using_reg (src_eqv);
5907	  src_eqv_hash = HASH (src_eqv, eqvmode);
5908	}
5909      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5910      elt->in_memory = src_eqv_in_memory;
5911      src_eqv_elt = elt;
5912
5913      /* Check to see if src_eqv_elt is the same as a set source which
5914	 does not yet have an elt, and if so set the elt of the set source
5915	 to src_eqv_elt.  */
5916      for (i = 0; i < n_sets; i++)
5917	if (sets[i].rtl && sets[i].src_elt == 0
5918	    && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5919	  sets[i].src_elt = src_eqv_elt;
5920    }
5921
5922  for (i = 0; i < n_sets; i++)
5923    if (sets[i].rtl && ! sets[i].src_volatile
5924	&& ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5925      {
5926	if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5927	  {
5928	    /* REG_EQUAL in setting a STRICT_LOW_PART
5929	       gives an equivalent for the entire destination register,
5930	       not just for the subreg being stored in now.
5931	       This is a more interesting equivalence, so we arrange later
5932	       to treat the entire reg as the destination.  */
5933	    sets[i].src_elt = src_eqv_elt;
5934	    sets[i].src_hash = src_eqv_hash;
5935	  }
5936	else
5937	  {
5938	    /* Insert source and constant equivalent into hash table, if not
5939	       already present.  */
5940	    struct table_elt *classp = src_eqv_elt;
5941	    rtx src = sets[i].src;
5942	    rtx dest = SET_DEST (sets[i].rtl);
5943	    enum machine_mode mode
5944	      = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5945
5946	    if (sets[i].src_elt == 0)
5947	      {
5948		/* Don't put a hard register source into the table if this is
5949		   the last insn of a libcall.  In this case, we only need
5950		   to put src_eqv_elt in src_elt.  */
5951		if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5952		  {
5953		    struct table_elt *elt;
5954
5955		    /* Note that these insert_regs calls cannot remove
5956		       any of the src_elt's, because they would have failed to
5957		       match if not still valid.  */
5958		    if (insert_regs (src, classp, 0))
5959		      {
5960			rehash_using_reg (src);
5961			sets[i].src_hash = HASH (src, mode);
5962		      }
5963		    elt = insert (src, classp, sets[i].src_hash, mode);
5964		    elt->in_memory = sets[i].src_in_memory;
5965		    sets[i].src_elt = classp = elt;
5966		  }
5967		else
5968		  sets[i].src_elt = classp;
5969	      }
5970	    if (sets[i].src_const && sets[i].src_const_elt == 0
5971		&& src != sets[i].src_const
5972		&& ! rtx_equal_p (sets[i].src_const, src))
5973	      sets[i].src_elt = insert (sets[i].src_const, classp,
5974					sets[i].src_const_hash, mode);
5975	  }
5976      }
5977    else if (sets[i].src_elt == 0)
5978      /* If we did not insert the source into the hash table (e.g., it was
5979	 volatile), note the equivalence class for the REG_EQUAL value, if any,
5980	 so that the destination goes into that class.  */
5981      sets[i].src_elt = src_eqv_elt;
5982
5983  invalidate_from_clobbers (x);
5984
5985  /* Some registers are invalidated by subroutine calls.  Memory is
5986     invalidated by non-constant calls.  */
5987
5988  if (GET_CODE (insn) == CALL_INSN)
5989    {
5990      if (! CONST_OR_PURE_CALL_P (insn))
5991	invalidate_memory ();
5992      invalidate_for_call ();
5993    }
5994
5995  /* Now invalidate everything set by this instruction.
5996     If a SUBREG or other funny destination is being set,
5997     sets[i].rtl is still nonzero, so here we invalidate the reg
5998     a part of which is being set.  */
5999
6000  for (i = 0; i < n_sets; i++)
6001    if (sets[i].rtl)
6002      {
6003	/* We can't use the inner dest, because the mode associated with
6004	   a ZERO_EXTRACT is significant.  */
6005	rtx dest = SET_DEST (sets[i].rtl);
6006
6007	/* Needed for registers to remove the register from its
6008	   previous quantity's chain.
6009	   Needed for memory if this is a nonvarying address, unless
6010	   we have just done an invalidate_memory that covers even those.  */
6011	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6012	  invalidate (dest, VOIDmode);
6013	else if (GET_CODE (dest) == MEM)
6014	  {
6015	    /* Outgoing arguments for a libcall don't
6016	       affect any recorded expressions.  */
6017	    if (! libcall_insn || insn == libcall_insn)
6018	      invalidate (dest, VOIDmode);
6019	  }
6020	else if (GET_CODE (dest) == STRICT_LOW_PART
6021		 || GET_CODE (dest) == ZERO_EXTRACT)
6022	  invalidate (XEXP (dest, 0), GET_MODE (dest));
6023      }
6024
6025  /* A volatile ASM invalidates everything.  */
6026  if (GET_CODE (insn) == INSN
6027      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6028      && MEM_VOLATILE_P (PATTERN (insn)))
6029    flush_hash_table ();
6030
6031  /* Make sure registers mentioned in destinations
6032     are safe for use in an expression to be inserted.
6033     This removes from the hash table
6034     any invalid entry that refers to one of these registers.
6035
6036     We don't care about the return value from mention_regs because
6037     we are going to hash the SET_DEST values unconditionally.  */
6038
6039  for (i = 0; i < n_sets; i++)
6040    {
6041      if (sets[i].rtl)
6042	{
6043	  rtx x = SET_DEST (sets[i].rtl);
6044
6045	  if (GET_CODE (x) != REG)
6046	    mention_regs (x);
6047	  else
6048	    {
6049	      /* We used to rely on all references to a register becoming
6050		 inaccessible when a register changes to a new quantity,
6051		 since that changes the hash code.  However, that is not
6052		 safe, since after HASH_SIZE new quantities we get a
6053		 hash 'collision' of a register with its own invalid
6054		 entries.  And since SUBREGs have been changed not to
6055		 change their hash code with the hash code of the register,
6056		 it wouldn't work any longer at all.  So we have to check
6057		 for any invalid references lying around now.
6058		 This code is similar to the REG case in mention_regs,
6059		 but it knows that reg_tick has been incremented, and
6060		 it leaves reg_in_table as -1 .  */
6061	      unsigned int regno = REGNO (x);
6062	      unsigned int endregno
6063		= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6064			   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6065	      unsigned int i;
6066
6067	      for (i = regno; i < endregno; i++)
6068		{
6069		  if (REG_IN_TABLE (i) >= 0)
6070		    {
6071		      remove_invalid_refs (i);
6072		      REG_IN_TABLE (i) = -1;
6073		    }
6074		}
6075	    }
6076	}
6077    }
6078
6079  /* We may have just removed some of the src_elt's from the hash table.
6080     So replace each one with the current head of the same class.  */
6081
6082  for (i = 0; i < n_sets; i++)
6083    if (sets[i].rtl)
6084      {
6085	if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6086	  /* If elt was removed, find current head of same class,
6087	     or 0 if nothing remains of that class.  */
6088	  {
6089	    struct table_elt *elt = sets[i].src_elt;
6090
6091	    while (elt && elt->prev_same_value)
6092	      elt = elt->prev_same_value;
6093
6094	    while (elt && elt->first_same_value == 0)
6095	      elt = elt->next_same_value;
6096	    sets[i].src_elt = elt ? elt->first_same_value : 0;
6097	  }
6098      }
6099
6100  /* Now insert the destinations into their equivalence classes.  */
6101
6102  for (i = 0; i < n_sets; i++)
6103    if (sets[i].rtl)
6104      {
6105	rtx dest = SET_DEST (sets[i].rtl);
6106	rtx inner_dest = sets[i].inner_dest;
6107	struct table_elt *elt;
6108
6109	/* Don't record value if we are not supposed to risk allocating
6110	   floating-point values in registers that might be wider than
6111	   memory.  */
6112	if ((flag_float_store
6113	     && GET_CODE (dest) == MEM
6114	     && FLOAT_MODE_P (GET_MODE (dest)))
6115	    /* Don't record BLKmode values, because we don't know the
6116	       size of it, and can't be sure that other BLKmode values
6117	       have the same or smaller size.  */
6118	    || GET_MODE (dest) == BLKmode
6119	    /* Don't record values of destinations set inside a libcall block
6120	       since we might delete the libcall.  Things should have been set
6121	       up so we won't want to reuse such a value, but we play it safe
6122	       here.  */
6123	    || libcall_insn
6124	    /* If we didn't put a REG_EQUAL value or a source into the hash
6125	       table, there is no point is recording DEST.  */
6126	    || sets[i].src_elt == 0
6127	    /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6128	       or SIGN_EXTEND, don't record DEST since it can cause
6129	       some tracking to be wrong.
6130
6131	       ??? Think about this more later.  */
6132	    || (GET_CODE (dest) == SUBREG
6133		&& (GET_MODE_SIZE (GET_MODE (dest))
6134		    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6135		&& (GET_CODE (sets[i].src) == SIGN_EXTEND
6136		    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6137	  continue;
6138
6139	/* STRICT_LOW_PART isn't part of the value BEING set,
6140	   and neither is the SUBREG inside it.
6141	   Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6142	if (GET_CODE (dest) == STRICT_LOW_PART)
6143	  dest = SUBREG_REG (XEXP (dest, 0));
6144
6145	if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6146	  /* Registers must also be inserted into chains for quantities.  */
6147	  if (insert_regs (dest, sets[i].src_elt, 1))
6148	    {
6149	      /* If `insert_regs' changes something, the hash code must be
6150		 recalculated.  */
6151	      rehash_using_reg (dest);
6152	      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6153	    }
6154
6155	if (GET_CODE (inner_dest) == MEM
6156	    && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6157	  /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6158	     that (MEM (ADDRESSOF (X))) is equivalent to Y.
6159	     Consider the case in which the address of the MEM is
6160	     passed to a function, which alters the MEM.  Then, if we
6161	     later use Y instead of the MEM we'll miss the update.  */
6162	  elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6163	else
6164	  elt = insert (dest, sets[i].src_elt,
6165			sets[i].dest_hash, GET_MODE (dest));
6166
6167	elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6168			  && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6169			      || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6170							  0))));
6171
6172	/* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6173	   narrower than M2, and both M1 and M2 are the same number of words,
6174	   we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6175	   make that equivalence as well.
6176
6177	   However, BAR may have equivalences for which gen_lowpart_if_possible
6178	   will produce a simpler value than gen_lowpart_if_possible applied to
6179	   BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6180	   BAR's equivalences.  If we don't get a simplified form, make
6181	   the SUBREG.  It will not be used in an equivalence, but will
6182	   cause two similar assignments to be detected.
6183
6184	   Note the loop below will find SUBREG_REG (DEST) since we have
6185	   already entered SRC and DEST of the SET in the table.  */
6186
6187	if (GET_CODE (dest) == SUBREG
6188	    && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6189		 / UNITS_PER_WORD)
6190		== (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6191	    && (GET_MODE_SIZE (GET_MODE (dest))
6192		>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6193	    && sets[i].src_elt != 0)
6194	  {
6195	    enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6196	    struct table_elt *elt, *classp = 0;
6197
6198	    for (elt = sets[i].src_elt->first_same_value; elt;
6199		 elt = elt->next_same_value)
6200	      {
6201		rtx new_src = 0;
6202		unsigned src_hash;
6203		struct table_elt *src_elt;
6204
6205		/* Ignore invalid entries.  */
6206		if (GET_CODE (elt->exp) != REG
6207		    && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6208		  continue;
6209
6210		new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6211		if (new_src == 0)
6212		  new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6213
6214		src_hash = HASH (new_src, new_mode);
6215		src_elt = lookup (new_src, src_hash, new_mode);
6216
6217		/* Put the new source in the hash table is if isn't
6218		   already.  */
6219		if (src_elt == 0)
6220		  {
6221		    if (insert_regs (new_src, classp, 0))
6222		      {
6223			rehash_using_reg (new_src);
6224			src_hash = HASH (new_src, new_mode);
6225		      }
6226		    src_elt = insert (new_src, classp, src_hash, new_mode);
6227		    src_elt->in_memory = elt->in_memory;
6228		  }
6229		else if (classp && classp != src_elt->first_same_value)
6230		  /* Show that two things that we've seen before are
6231		     actually the same.  */
6232		  merge_equiv_classes (src_elt, classp);
6233
6234		classp = src_elt->first_same_value;
6235		/* Ignore invalid entries.  */
6236		while (classp
6237		       && GET_CODE (classp->exp) != REG
6238		       && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6239		  classp = classp->next_same_value;
6240	      }
6241	  }
6242      }
6243
6244  /* Special handling for (set REG0 REG1) where REG0 is the
6245     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6246     be used in the sequel, so (if easily done) change this insn to
6247     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6248     that computed their value.  Then REG1 will become a dead store
6249     and won't cloud the situation for later optimizations.
6250
6251     Do not make this change if REG1 is a hard register, because it will
6252     then be used in the sequel and we may be changing a two-operand insn
6253     into a three-operand insn.
6254
6255     Also do not do this if we are operating on a copy of INSN.
6256
6257     Also don't do this if INSN ends a libcall; this would cause an unrelated
6258     register to be set in the middle of a libcall, and we then get bad code
6259     if the libcall is deleted.  */
6260
6261  if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6262      && NEXT_INSN (PREV_INSN (insn)) == insn
6263      && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6264      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6265      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6266    {
6267      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6268      struct qty_table_elem *src_ent = &qty_table[src_q];
6269
6270      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6271	  && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6272	{
6273	  rtx prev = prev_nonnote_insn (insn);
6274
6275	  /* Do not swap the registers around if the previous instruction
6276	     attaches a REG_EQUIV note to REG1.
6277
6278	     ??? It's not entirely clear whether we can transfer a REG_EQUIV
6279	     from the pseudo that originally shadowed an incoming argument
6280	     to another register.  Some uses of REG_EQUIV might rely on it
6281	     being attached to REG1 rather than REG2.
6282
6283	     This section previously turned the REG_EQUIV into a REG_EQUAL
6284	     note.  We cannot do that because REG_EQUIV may provide an
6285	     uninitialised stack slot when REG_PARM_STACK_SPACE is used.  */
6286
6287	  if (prev != 0 && GET_CODE (prev) == INSN
6288	      && GET_CODE (PATTERN (prev)) == SET
6289	      && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6290	      && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6291	    {
6292	      rtx dest = SET_DEST (sets[0].rtl);
6293	      rtx src = SET_SRC (sets[0].rtl);
6294	      rtx note;
6295
6296	      validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6297	      validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6298	      validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6299	      apply_change_group ();
6300
6301	      /* If there was a REG_WAS_0 note on PREV, remove it.  Move
6302		 any REG_WAS_0 note on INSN to PREV.  */
6303	      note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6304	      if (note)
6305		remove_note (prev, note);
6306
6307	      note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6308	      if (note)
6309		{
6310		  remove_note (insn, note);
6311		  XEXP (note, 1) = REG_NOTES (prev);
6312		  REG_NOTES (prev) = note;
6313		}
6314
6315	      /* If INSN has a REG_EQUAL note, and this note mentions
6316		 REG0, then we must delete it, because the value in
6317		 REG0 has changed.  If the note's value is REG1, we must
6318		 also delete it because that is now this insn's dest.  */
6319	      note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6320	      if (note != 0
6321		  && (reg_mentioned_p (dest, XEXP (note, 0))
6322		      || rtx_equal_p (src, XEXP (note, 0))))
6323		remove_note (insn, note);
6324	    }
6325	}
6326    }
6327
6328  /* If this is a conditional jump insn, record any known equivalences due to
6329     the condition being tested.  */
6330
6331  last_jump_equiv_class = 0;
6332  if (GET_CODE (insn) == JUMP_INSN
6333      && n_sets == 1 && GET_CODE (x) == SET
6334      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6335    record_jump_equiv (insn, 0);
6336
6337#ifdef HAVE_cc0
6338  /* If the previous insn set CC0 and this insn no longer references CC0,
6339     delete the previous insn.  Here we use the fact that nothing expects CC0
6340     to be valid over an insn, which is true until the final pass.  */
6341  if (prev_insn && GET_CODE (prev_insn) == INSN
6342      && (tem = single_set (prev_insn)) != 0
6343      && SET_DEST (tem) == cc0_rtx
6344      && ! reg_mentioned_p (cc0_rtx, x))
6345    delete_insn (prev_insn);
6346
6347  prev_insn_cc0 = this_insn_cc0;
6348  prev_insn_cc0_mode = this_insn_cc0_mode;
6349#endif
6350
6351  prev_insn = insn;
6352}
6353
6354/* Remove from the hash table all expressions that reference memory.  */
6355
6356static void
6357invalidate_memory ()
6358{
6359  int i;
6360  struct table_elt *p, *next;
6361
6362  for (i = 0; i < HASH_SIZE; i++)
6363    for (p = table[i]; p; p = next)
6364      {
6365	next = p->next_same_hash;
6366	if (p->in_memory)
6367	  remove_from_table (p, i);
6368      }
6369}
6370
6371/* If ADDR is an address that implicitly affects the stack pointer, return
6372   1 and update the register tables to show the effect.  Else, return 0.  */
6373
6374static int
6375addr_affects_sp_p (addr)
6376     rtx addr;
6377{
6378  if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6379      && GET_CODE (XEXP (addr, 0)) == REG
6380      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6381    {
6382      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6383	REG_TICK (STACK_POINTER_REGNUM)++;
6384
6385      /* This should be *very* rare.  */
6386      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6387	invalidate (stack_pointer_rtx, VOIDmode);
6388
6389      return 1;
6390    }
6391
6392  return 0;
6393}
6394
6395/* Perform invalidation on the basis of everything about an insn
6396   except for invalidating the actual places that are SET in it.
6397   This includes the places CLOBBERed, and anything that might
6398   alias with something that is SET or CLOBBERed.
6399
6400   X is the pattern of the insn.  */
6401
6402static void
6403invalidate_from_clobbers (x)
6404     rtx x;
6405{
6406  if (GET_CODE (x) == CLOBBER)
6407    {
6408      rtx ref = XEXP (x, 0);
6409      if (ref)
6410	{
6411	  if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6412	      || GET_CODE (ref) == MEM)
6413	    invalidate (ref, VOIDmode);
6414	  else if (GET_CODE (ref) == STRICT_LOW_PART
6415		   || GET_CODE (ref) == ZERO_EXTRACT)
6416	    invalidate (XEXP (ref, 0), GET_MODE (ref));
6417	}
6418    }
6419  else if (GET_CODE (x) == PARALLEL)
6420    {
6421      int i;
6422      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6423	{
6424	  rtx y = XVECEXP (x, 0, i);
6425	  if (GET_CODE (y) == CLOBBER)
6426	    {
6427	      rtx ref = XEXP (y, 0);
6428	      if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6429		  || GET_CODE (ref) == MEM)
6430		invalidate (ref, VOIDmode);
6431	      else if (GET_CODE (ref) == STRICT_LOW_PART
6432		       || GET_CODE (ref) == ZERO_EXTRACT)
6433		invalidate (XEXP (ref, 0), GET_MODE (ref));
6434	    }
6435	}
6436    }
6437}
6438
6439/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6440   and replace any registers in them with either an equivalent constant
6441   or the canonical form of the register.  If we are inside an address,
6442   only do this if the address remains valid.
6443
6444   OBJECT is 0 except when within a MEM in which case it is the MEM.
6445
6446   Return the replacement for X.  */
6447
6448static rtx
6449cse_process_notes (x, object)
6450     rtx x;
6451     rtx object;
6452{
6453  enum rtx_code code = GET_CODE (x);
6454  const char *fmt = GET_RTX_FORMAT (code);
6455  int i;
6456
6457  switch (code)
6458    {
6459    case CONST_INT:
6460    case CONST:
6461    case SYMBOL_REF:
6462    case LABEL_REF:
6463    case CONST_DOUBLE:
6464    case CONST_VECTOR:
6465    case PC:
6466    case CC0:
6467    case LO_SUM:
6468      return x;
6469
6470    case MEM:
6471      validate_change (x, &XEXP (x, 0),
6472		       cse_process_notes (XEXP (x, 0), x), 0);
6473      return x;
6474
6475    case EXPR_LIST:
6476    case INSN_LIST:
6477      if (REG_NOTE_KIND (x) == REG_EQUAL)
6478	XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6479      if (XEXP (x, 1))
6480	XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6481      return x;
6482
6483    case SIGN_EXTEND:
6484    case ZERO_EXTEND:
6485    case SUBREG:
6486      {
6487	rtx new = cse_process_notes (XEXP (x, 0), object);
6488	/* We don't substitute VOIDmode constants into these rtx,
6489	   since they would impede folding.  */
6490	if (GET_MODE (new) != VOIDmode)
6491	  validate_change (object, &XEXP (x, 0), new, 0);
6492	return x;
6493      }
6494
6495    case REG:
6496      i = REG_QTY (REGNO (x));
6497
6498      /* Return a constant or a constant register.  */
6499      if (REGNO_QTY_VALID_P (REGNO (x)))
6500	{
6501	  struct qty_table_elem *ent = &qty_table[i];
6502
6503	  if (ent->const_rtx != NULL_RTX
6504	      && (CONSTANT_P (ent->const_rtx)
6505		  || GET_CODE (ent->const_rtx) == REG))
6506	    {
6507	      rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6508	      if (new)
6509		return new;
6510	    }
6511	}
6512
6513      /* Otherwise, canonicalize this register.  */
6514      return canon_reg (x, NULL_RTX);
6515
6516    default:
6517      break;
6518    }
6519
6520  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6521    if (fmt[i] == 'e')
6522      validate_change (object, &XEXP (x, i),
6523		       cse_process_notes (XEXP (x, i), object), 0);
6524
6525  return x;
6526}
6527
6528/* Find common subexpressions between the end test of a loop and the beginning
6529   of the loop.  LOOP_START is the CODE_LABEL at the start of a loop.
6530
6531   Often we have a loop where an expression in the exit test is used
6532   in the body of the loop.  For example "while (*p) *q++ = *p++;".
6533   Because of the way we duplicate the loop exit test in front of the loop,
6534   however, we don't detect that common subexpression.  This will be caught
6535   when global cse is implemented, but this is a quite common case.
6536
6537   This function handles the most common cases of these common expressions.
6538   It is called after we have processed the basic block ending with the
6539   NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6540   jumps to a label used only once.  */
6541
6542static void
6543cse_around_loop (loop_start)
6544     rtx loop_start;
6545{
6546  rtx insn;
6547  int i;
6548  struct table_elt *p;
6549
6550  /* If the jump at the end of the loop doesn't go to the start, we don't
6551     do anything.  */
6552  for (insn = PREV_INSN (loop_start);
6553       insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6554       insn = PREV_INSN (insn))
6555    ;
6556
6557  if (insn == 0
6558      || GET_CODE (insn) != NOTE
6559      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6560    return;
6561
6562  /* If the last insn of the loop (the end test) was an NE comparison,
6563     we will interpret it as an EQ comparison, since we fell through
6564     the loop.  Any equivalences resulting from that comparison are
6565     therefore not valid and must be invalidated.  */
6566  if (last_jump_equiv_class)
6567    for (p = last_jump_equiv_class->first_same_value; p;
6568	 p = p->next_same_value)
6569      {
6570	if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6571	    || (GET_CODE (p->exp) == SUBREG
6572		&& GET_CODE (SUBREG_REG (p->exp)) == REG))
6573	  invalidate (p->exp, VOIDmode);
6574	else if (GET_CODE (p->exp) == STRICT_LOW_PART
6575		 || GET_CODE (p->exp) == ZERO_EXTRACT)
6576	  invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6577      }
6578
6579  /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6580     a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6581
6582     The only thing we do with SET_DEST is invalidate entries, so we
6583     can safely process each SET in order.  It is slightly less efficient
6584     to do so, but we only want to handle the most common cases.
6585
6586     The gen_move_insn call in cse_set_around_loop may create new pseudos.
6587     These pseudos won't have valid entries in any of the tables indexed
6588     by register number, such as reg_qty.  We avoid out-of-range array
6589     accesses by not processing any instructions created after cse started.  */
6590
6591  for (insn = NEXT_INSN (loop_start);
6592       GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6593       && INSN_UID (insn) < max_insn_uid
6594       && ! (GET_CODE (insn) == NOTE
6595	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6596       insn = NEXT_INSN (insn))
6597    {
6598      if (INSN_P (insn)
6599	  && (GET_CODE (PATTERN (insn)) == SET
6600	      || GET_CODE (PATTERN (insn)) == CLOBBER))
6601	cse_set_around_loop (PATTERN (insn), insn, loop_start);
6602      else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6603	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6604	  if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6605	      || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6606	    cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6607				 loop_start);
6608    }
6609}
6610
6611/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6612   since they are done elsewhere.  This function is called via note_stores.  */
6613
6614static void
6615invalidate_skipped_set (dest, set, data)
6616     rtx set;
6617     rtx dest;
6618     void *data ATTRIBUTE_UNUSED;
6619{
6620  enum rtx_code code = GET_CODE (dest);
6621
6622  if (code == MEM
6623      && ! addr_affects_sp_p (dest)	/* If this is not a stack push ...  */
6624      /* There are times when an address can appear varying and be a PLUS
6625	 during this scan when it would be a fixed address were we to know
6626	 the proper equivalences.  So invalidate all memory if there is
6627	 a BLKmode or nonscalar memory reference or a reference to a
6628	 variable address.  */
6629      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6630	  || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6631    {
6632      invalidate_memory ();
6633      return;
6634    }
6635
6636  if (GET_CODE (set) == CLOBBER
6637#ifdef HAVE_cc0
6638      || dest == cc0_rtx
6639#endif
6640      || dest == pc_rtx)
6641    return;
6642
6643  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6644    invalidate (XEXP (dest, 0), GET_MODE (dest));
6645  else if (code == REG || code == SUBREG || code == MEM)
6646    invalidate (dest, VOIDmode);
6647}
6648
6649/* Invalidate all insns from START up to the end of the function or the
6650   next label.  This called when we wish to CSE around a block that is
6651   conditionally executed.  */
6652
6653static void
6654invalidate_skipped_block (start)
6655     rtx start;
6656{
6657  rtx insn;
6658
6659  for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6660       insn = NEXT_INSN (insn))
6661    {
6662      if (! INSN_P (insn))
6663	continue;
6664
6665      if (GET_CODE (insn) == CALL_INSN)
6666	{
6667	  if (! CONST_OR_PURE_CALL_P (insn))
6668	    invalidate_memory ();
6669	  invalidate_for_call ();
6670	}
6671
6672      invalidate_from_clobbers (PATTERN (insn));
6673      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6674    }
6675}
6676
6677/* If modifying X will modify the value in *DATA (which is really an
6678   `rtx *'), indicate that fact by setting the pointed to value to
6679   NULL_RTX.  */
6680
6681static void
6682cse_check_loop_start (x, set, data)
6683     rtx x;
6684     rtx set ATTRIBUTE_UNUSED;
6685     void *data;
6686{
6687  rtx *cse_check_loop_start_value = (rtx *) data;
6688
6689  if (*cse_check_loop_start_value == NULL_RTX
6690      || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6691    return;
6692
6693  if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6694      || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6695    *cse_check_loop_start_value = NULL_RTX;
6696}
6697
6698/* X is a SET or CLOBBER contained in INSN that was found near the start of
6699   a loop that starts with the label at LOOP_START.
6700
6701   If X is a SET, we see if its SET_SRC is currently in our hash table.
6702   If so, we see if it has a value equal to some register used only in the
6703   loop exit code (as marked by jump.c).
6704
6705   If those two conditions are true, we search backwards from the start of
6706   the loop to see if that same value was loaded into a register that still
6707   retains its value at the start of the loop.
6708
6709   If so, we insert an insn after the load to copy the destination of that
6710   load into the equivalent register and (try to) replace our SET_SRC with that
6711   register.
6712
6713   In any event, we invalidate whatever this SET or CLOBBER modifies.  */
6714
6715static void
6716cse_set_around_loop (x, insn, loop_start)
6717     rtx x;
6718     rtx insn;
6719     rtx loop_start;
6720{
6721  struct table_elt *src_elt;
6722
6723  /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6724     are setting PC or CC0 or whose SET_SRC is already a register.  */
6725  if (GET_CODE (x) == SET
6726      && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6727      && GET_CODE (SET_SRC (x)) != REG)
6728    {
6729      src_elt = lookup (SET_SRC (x),
6730			HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6731			GET_MODE (SET_DEST (x)));
6732
6733      if (src_elt)
6734	for (src_elt = src_elt->first_same_value; src_elt;
6735	     src_elt = src_elt->next_same_value)
6736	  if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6737	      && COST (src_elt->exp) < COST (SET_SRC (x)))
6738	    {
6739	      rtx p, set;
6740
6741	      /* Look for an insn in front of LOOP_START that sets
6742		 something in the desired mode to SET_SRC (x) before we hit
6743		 a label or CALL_INSN.  */
6744
6745	      for (p = prev_nonnote_insn (loop_start);
6746		   p && GET_CODE (p) != CALL_INSN
6747		   && GET_CODE (p) != CODE_LABEL;
6748		   p = prev_nonnote_insn  (p))
6749		if ((set = single_set (p)) != 0
6750		    && GET_CODE (SET_DEST (set)) == REG
6751		    && GET_MODE (SET_DEST (set)) == src_elt->mode
6752		    && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6753		  {
6754		    /* We now have to ensure that nothing between P
6755		       and LOOP_START modified anything referenced in
6756		       SET_SRC (x).  We know that nothing within the loop
6757		       can modify it, or we would have invalidated it in
6758		       the hash table.  */
6759		    rtx q;
6760		    rtx cse_check_loop_start_value = SET_SRC (x);
6761		    for (q = p; q != loop_start; q = NEXT_INSN (q))
6762		      if (INSN_P (q))
6763			note_stores (PATTERN (q),
6764				     cse_check_loop_start,
6765				     &cse_check_loop_start_value);
6766
6767		    /* If nothing was changed and we can replace our
6768		       SET_SRC, add an insn after P to copy its destination
6769		       to what we will be replacing SET_SRC with.  */
6770		    if (cse_check_loop_start_value
6771			&& validate_change (insn, &SET_SRC (x),
6772					    src_elt->exp, 0))
6773		      {
6774			/* If this creates new pseudos, this is unsafe,
6775			   because the regno of new pseudo is unsuitable
6776			   to index into reg_qty when cse_insn processes
6777			   the new insn.  Therefore, if a new pseudo was
6778			   created, discard this optimization.  */
6779			int nregs = max_reg_num ();
6780			rtx move
6781			  = gen_move_insn (src_elt->exp, SET_DEST (set));
6782			if (nregs != max_reg_num ())
6783			  {
6784			    if (! validate_change (insn, &SET_SRC (x),
6785						   SET_SRC (set), 0))
6786			      abort ();
6787			  }
6788			else
6789			  emit_insn_after (move, p);
6790		      }
6791		    break;
6792		  }
6793	    }
6794    }
6795
6796  /* Deal with the destination of X affecting the stack pointer.  */
6797  addr_affects_sp_p (SET_DEST (x));
6798
6799  /* See comment on similar code in cse_insn for explanation of these
6800     tests.  */
6801  if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6802      || GET_CODE (SET_DEST (x)) == MEM)
6803    invalidate (SET_DEST (x), VOIDmode);
6804  else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6805	   || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6806    invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6807}
6808
6809/* Find the end of INSN's basic block and return its range,
6810   the total number of SETs in all the insns of the block, the last insn of the
6811   block, and the branch path.
6812
6813   The branch path indicates which branches should be followed.  If a non-zero
6814   path size is specified, the block should be rescanned and a different set
6815   of branches will be taken.  The branch path is only used if
6816   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6817
6818   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6819   used to describe the block.  It is filled in with the information about
6820   the current block.  The incoming structure's branch path, if any, is used
6821   to construct the output branch path.  */
6822
6823void
6824cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6825     rtx insn;
6826     struct cse_basic_block_data *data;
6827     int follow_jumps;
6828     int after_loop;
6829     int skip_blocks;
6830{
6831  rtx p = insn, q;
6832  int nsets = 0;
6833  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6834  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6835  int path_size = data->path_size;
6836  int path_entry = 0;
6837  int i;
6838
6839  /* Update the previous branch path, if any.  If the last branch was
6840     previously TAKEN, mark it NOT_TAKEN.  If it was previously NOT_TAKEN,
6841     shorten the path by one and look at the previous branch.  We know that
6842     at least one branch must have been taken if PATH_SIZE is non-zero.  */
6843  while (path_size > 0)
6844    {
6845      if (data->path[path_size - 1].status != NOT_TAKEN)
6846	{
6847	  data->path[path_size - 1].status = NOT_TAKEN;
6848	  break;
6849	}
6850      else
6851	path_size--;
6852    }
6853
6854  /* If the first instruction is marked with QImode, that means we've
6855     already processed this block.  Our caller will look at DATA->LAST
6856     to figure out where to go next.  We want to return the next block
6857     in the instruction stream, not some branched-to block somewhere
6858     else.  We accomplish this by pretending our called forbid us to
6859     follow jumps, or skip blocks.  */
6860  if (GET_MODE (insn) == QImode)
6861    follow_jumps = skip_blocks = 0;
6862
6863  /* Scan to end of this basic block.  */
6864  while (p && GET_CODE (p) != CODE_LABEL)
6865    {
6866      /* Don't cse out the end of a loop.  This makes a difference
6867	 only for the unusual loops that always execute at least once;
6868	 all other loops have labels there so we will stop in any case.
6869	 Cse'ing out the end of the loop is dangerous because it
6870	 might cause an invariant expression inside the loop
6871	 to be reused after the end of the loop.  This would make it
6872	 hard to move the expression out of the loop in loop.c,
6873	 especially if it is one of several equivalent expressions
6874	 and loop.c would like to eliminate it.
6875
6876	 If we are running after loop.c has finished, we can ignore
6877	 the NOTE_INSN_LOOP_END.  */
6878
6879      if (! after_loop && GET_CODE (p) == NOTE
6880	  && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6881	break;
6882
6883      /* Don't cse over a call to setjmp; on some machines (eg VAX)
6884	 the regs restored by the longjmp come from
6885	 a later time than the setjmp.  */
6886      if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6887	  && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6888	break;
6889
6890      /* A PARALLEL can have lots of SETs in it,
6891	 especially if it is really an ASM_OPERANDS.  */
6892      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6893	nsets += XVECLEN (PATTERN (p), 0);
6894      else if (GET_CODE (p) != NOTE)
6895	nsets += 1;
6896
6897      /* Ignore insns made by CSE; they cannot affect the boundaries of
6898	 the basic block.  */
6899
6900      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6901	high_cuid = INSN_CUID (p);
6902      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6903	low_cuid = INSN_CUID (p);
6904
6905      /* See if this insn is in our branch path.  If it is and we are to
6906	 take it, do so.  */
6907      if (path_entry < path_size && data->path[path_entry].branch == p)
6908	{
6909	  if (data->path[path_entry].status != NOT_TAKEN)
6910	    p = JUMP_LABEL (p);
6911
6912	  /* Point to next entry in path, if any.  */
6913	  path_entry++;
6914	}
6915
6916      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6917	 was specified, we haven't reached our maximum path length, there are
6918	 insns following the target of the jump, this is the only use of the
6919	 jump label, and the target label is preceded by a BARRIER.
6920
6921	 Alternatively, we can follow the jump if it branches around a
6922	 block of code and there are no other branches into the block.
6923	 In this case invalidate_skipped_block will be called to invalidate any
6924	 registers set in the block when following the jump.  */
6925
6926      else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6927	       && GET_CODE (p) == JUMP_INSN
6928	       && GET_CODE (PATTERN (p)) == SET
6929	       && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6930	       && JUMP_LABEL (p) != 0
6931	       && LABEL_NUSES (JUMP_LABEL (p)) == 1
6932	       && NEXT_INSN (JUMP_LABEL (p)) != 0)
6933	{
6934	  for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6935	    if ((GET_CODE (q) != NOTE
6936		 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6937		 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6938		     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6939		&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6940	      break;
6941
6942	  /* If we ran into a BARRIER, this code is an extension of the
6943	     basic block when the branch is taken.  */
6944	  if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6945	    {
6946	      /* Don't allow ourself to keep walking around an
6947		 always-executed loop.  */
6948	      if (next_real_insn (q) == next)
6949		{
6950		  p = NEXT_INSN (p);
6951		  continue;
6952		}
6953
6954	      /* Similarly, don't put a branch in our path more than once.  */
6955	      for (i = 0; i < path_entry; i++)
6956		if (data->path[i].branch == p)
6957		  break;
6958
6959	      if (i != path_entry)
6960		break;
6961
6962	      data->path[path_entry].branch = p;
6963	      data->path[path_entry++].status = TAKEN;
6964
6965	      /* This branch now ends our path.  It was possible that we
6966		 didn't see this branch the last time around (when the
6967		 insn in front of the target was a JUMP_INSN that was
6968		 turned into a no-op).  */
6969	      path_size = path_entry;
6970
6971	      p = JUMP_LABEL (p);
6972	      /* Mark block so we won't scan it again later.  */
6973	      PUT_MODE (NEXT_INSN (p), QImode);
6974	    }
6975	  /* Detect a branch around a block of code.  */
6976	  else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6977	    {
6978	      rtx tmp;
6979
6980	      if (next_real_insn (q) == next)
6981		{
6982		  p = NEXT_INSN (p);
6983		  continue;
6984		}
6985
6986	      for (i = 0; i < path_entry; i++)
6987		if (data->path[i].branch == p)
6988		  break;
6989
6990	      if (i != path_entry)
6991		break;
6992
6993	      /* This is no_labels_between_p (p, q) with an added check for
6994		 reaching the end of a function (in case Q precedes P).  */
6995	      for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6996		if (GET_CODE (tmp) == CODE_LABEL)
6997		  break;
6998
6999	      if (tmp == q)
7000		{
7001		  data->path[path_entry].branch = p;
7002		  data->path[path_entry++].status = AROUND;
7003
7004		  path_size = path_entry;
7005
7006		  p = JUMP_LABEL (p);
7007		  /* Mark block so we won't scan it again later.  */
7008		  PUT_MODE (NEXT_INSN (p), QImode);
7009		}
7010	    }
7011	}
7012      p = NEXT_INSN (p);
7013    }
7014
7015  data->low_cuid = low_cuid;
7016  data->high_cuid = high_cuid;
7017  data->nsets = nsets;
7018  data->last = p;
7019
7020  /* If all jumps in the path are not taken, set our path length to zero
7021     so a rescan won't be done.  */
7022  for (i = path_size - 1; i >= 0; i--)
7023    if (data->path[i].status != NOT_TAKEN)
7024      break;
7025
7026  if (i == -1)
7027    data->path_size = 0;
7028  else
7029    data->path_size = path_size;
7030
7031  /* End the current branch path.  */
7032  data->path[path_size].branch = 0;
7033}
7034
7035/* Perform cse on the instructions of a function.
7036   F is the first instruction.
7037   NREGS is one plus the highest pseudo-reg number used in the instruction.
7038
7039   AFTER_LOOP is 1 if this is the cse call done after loop optimization
7040   (only if -frerun-cse-after-loop).
7041
7042   Returns 1 if jump_optimize should be redone due to simplifications
7043   in conditional jump instructions.  */
7044
7045int
7046cse_main (f, nregs, after_loop, file)
7047     rtx f;
7048     int nregs;
7049     int after_loop;
7050     FILE *file;
7051{
7052  struct cse_basic_block_data val;
7053  rtx insn = f;
7054  int i;
7055
7056  cse_jumps_altered = 0;
7057  recorded_label_ref = 0;
7058  constant_pool_entries_cost = 0;
7059  val.path_size = 0;
7060
7061  init_recog ();
7062  init_alias_analysis ();
7063
7064  max_reg = nregs;
7065
7066  max_insn_uid = get_max_uid ();
7067
7068  reg_eqv_table = (struct reg_eqv_elem *)
7069    xmalloc (nregs * sizeof (struct reg_eqv_elem));
7070
7071#ifdef LOAD_EXTEND_OP
7072
7073  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
7074     and change the code and mode as appropriate.  */
7075  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7076#endif
7077
7078  /* Reset the counter indicating how many elements have been made
7079     thus far.  */
7080  n_elements_made = 0;
7081
7082  /* Find the largest uid.  */
7083
7084  max_uid = get_max_uid ();
7085  uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7086
7087  /* Compute the mapping from uids to cuids.
7088     CUIDs are numbers assigned to insns, like uids,
7089     except that cuids increase monotonically through the code.
7090     Don't assign cuids to line-number NOTEs, so that the distance in cuids
7091     between two insns is not affected by -g.  */
7092
7093  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7094    {
7095      if (GET_CODE (insn) != NOTE
7096	  || NOTE_LINE_NUMBER (insn) < 0)
7097	INSN_CUID (insn) = ++i;
7098      else
7099	/* Give a line number note the same cuid as preceding insn.  */
7100	INSN_CUID (insn) = i;
7101    }
7102
7103  ggc_push_context ();
7104
7105  /* Loop over basic blocks.
7106     Compute the maximum number of qty's needed for each basic block
7107     (which is 2 for each SET).  */
7108  insn = f;
7109  while (insn)
7110    {
7111      cse_altered = 0;
7112      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7113			      flag_cse_skip_blocks);
7114
7115      /* If this basic block was already processed or has no sets, skip it.  */
7116      if (val.nsets == 0 || GET_MODE (insn) == QImode)
7117	{
7118	  PUT_MODE (insn, VOIDmode);
7119	  insn = (val.last ? NEXT_INSN (val.last) : 0);
7120	  val.path_size = 0;
7121	  continue;
7122	}
7123
7124      cse_basic_block_start = val.low_cuid;
7125      cse_basic_block_end = val.high_cuid;
7126      max_qty = val.nsets * 2;
7127
7128      if (file)
7129	fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7130		 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7131		 val.nsets);
7132
7133      /* Make MAX_QTY bigger to give us room to optimize
7134	 past the end of this basic block, if that should prove useful.  */
7135      if (max_qty < 500)
7136	max_qty = 500;
7137
7138      max_qty += max_reg;
7139
7140      /* If this basic block is being extended by following certain jumps,
7141         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7142         Otherwise, we start after this basic block.  */
7143      if (val.path_size > 0)
7144	cse_basic_block (insn, val.last, val.path, 0);
7145      else
7146	{
7147	  int old_cse_jumps_altered = cse_jumps_altered;
7148	  rtx temp;
7149
7150	  /* When cse changes a conditional jump to an unconditional
7151	     jump, we want to reprocess the block, since it will give
7152	     us a new branch path to investigate.  */
7153	  cse_jumps_altered = 0;
7154	  temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7155	  if (cse_jumps_altered == 0
7156	      || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7157	    insn = temp;
7158
7159	  cse_jumps_altered |= old_cse_jumps_altered;
7160	}
7161
7162      if (cse_altered)
7163	ggc_collect ();
7164
7165#ifdef USE_C_ALLOCA
7166      alloca (0);
7167#endif
7168    }
7169
7170  ggc_pop_context ();
7171
7172  if (max_elements_made < n_elements_made)
7173    max_elements_made = n_elements_made;
7174
7175  /* Clean up.  */
7176  end_alias_analysis ();
7177  free (uid_cuid);
7178  free (reg_eqv_table);
7179
7180  return cse_jumps_altered || recorded_label_ref;
7181}
7182
7183/* Process a single basic block.  FROM and TO and the limits of the basic
7184   block.  NEXT_BRANCH points to the branch path when following jumps or
7185   a null path when not following jumps.
7186
7187   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7188   loop.  This is true when we are being called for the last time on a
7189   block and this CSE pass is before loop.c.  */
7190
7191static rtx
7192cse_basic_block (from, to, next_branch, around_loop)
7193     rtx from, to;
7194     struct branch_path *next_branch;
7195     int around_loop;
7196{
7197  rtx insn;
7198  int to_usage = 0;
7199  rtx libcall_insn = NULL_RTX;
7200  int num_insns = 0;
7201
7202  /* This array is undefined before max_reg, so only allocate
7203     the space actually needed and adjust the start.  */
7204
7205  qty_table
7206    = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7207					 * sizeof (struct qty_table_elem));
7208  qty_table -= max_reg;
7209
7210  new_basic_block ();
7211
7212  /* TO might be a label.  If so, protect it from being deleted.  */
7213  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7214    ++LABEL_NUSES (to);
7215
7216  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7217    {
7218      enum rtx_code code = GET_CODE (insn);
7219
7220      /* If we have processed 1,000 insns, flush the hash table to
7221	 avoid extreme quadratic behavior.  We must not include NOTEs
7222	 in the count since there may be more of them when generating
7223	 debugging information.  If we clear the table at different
7224	 times, code generated with -g -O might be different than code
7225	 generated with -O but not -g.
7226
7227	 ??? This is a real kludge and needs to be done some other way.
7228	 Perhaps for 2.9.  */
7229      if (code != NOTE && num_insns++ > 1000)
7230	{
7231	  flush_hash_table ();
7232	  num_insns = 0;
7233	}
7234
7235      /* See if this is a branch that is part of the path.  If so, and it is
7236	 to be taken, do so.  */
7237      if (next_branch->branch == insn)
7238	{
7239	  enum taken status = next_branch++->status;
7240	  if (status != NOT_TAKEN)
7241	    {
7242	      if (status == TAKEN)
7243		record_jump_equiv (insn, 1);
7244	      else
7245		invalidate_skipped_block (NEXT_INSN (insn));
7246
7247	      /* Set the last insn as the jump insn; it doesn't affect cc0.
7248		 Then follow this branch.  */
7249#ifdef HAVE_cc0
7250	      prev_insn_cc0 = 0;
7251#endif
7252	      prev_insn = insn;
7253	      insn = JUMP_LABEL (insn);
7254	      continue;
7255	    }
7256	}
7257
7258      if (GET_MODE (insn) == QImode)
7259	PUT_MODE (insn, VOIDmode);
7260
7261      if (GET_RTX_CLASS (code) == 'i')
7262	{
7263	  rtx p;
7264
7265	  /* Process notes first so we have all notes in canonical forms when
7266	     looking for duplicate operations.  */
7267
7268	  if (REG_NOTES (insn))
7269	    REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7270
7271	  /* Track when we are inside in LIBCALL block.  Inside such a block,
7272	     we do not want to record destinations.  The last insn of a
7273	     LIBCALL block is not considered to be part of the block, since
7274	     its destination is the result of the block and hence should be
7275	     recorded.  */
7276
7277	  if (REG_NOTES (insn) != 0)
7278	    {
7279	      if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7280		libcall_insn = XEXP (p, 0);
7281	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7282		libcall_insn = 0;
7283	    }
7284
7285	  cse_insn (insn, libcall_insn);
7286
7287	  /* If we haven't already found an insn where we added a LABEL_REF,
7288	     check this one.  */
7289	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
7290	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7291			       (void *) insn))
7292	    recorded_label_ref = 1;
7293	}
7294
7295      /* If INSN is now an unconditional jump, skip to the end of our
7296	 basic block by pretending that we just did the last insn in the
7297	 basic block.  If we are jumping to the end of our block, show
7298	 that we can have one usage of TO.  */
7299
7300      if (any_uncondjump_p (insn))
7301	{
7302	  if (to == 0)
7303	    {
7304	      free (qty_table + max_reg);
7305	      return 0;
7306	    }
7307
7308	  if (JUMP_LABEL (insn) == to)
7309	    to_usage = 1;
7310
7311	  /* Maybe TO was deleted because the jump is unconditional.
7312	     If so, there is nothing left in this basic block.  */
7313	  /* ??? Perhaps it would be smarter to set TO
7314	     to whatever follows this insn,
7315	     and pretend the basic block had always ended here.  */
7316	  if (INSN_DELETED_P (to))
7317	    break;
7318
7319	  insn = PREV_INSN (to);
7320	}
7321
7322      /* See if it is ok to keep on going past the label
7323	 which used to end our basic block.  Remember that we incremented
7324	 the count of that label, so we decrement it here.  If we made
7325	 a jump unconditional, TO_USAGE will be one; in that case, we don't
7326	 want to count the use in that jump.  */
7327
7328      if (to != 0 && NEXT_INSN (insn) == to
7329	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7330	{
7331	  struct cse_basic_block_data val;
7332	  rtx prev;
7333
7334	  insn = NEXT_INSN (to);
7335
7336	  /* If TO was the last insn in the function, we are done.  */
7337	  if (insn == 0)
7338	    {
7339	      free (qty_table + max_reg);
7340	      return 0;
7341	    }
7342
7343	  /* If TO was preceded by a BARRIER we are done with this block
7344	     because it has no continuation.  */
7345	  prev = prev_nonnote_insn (to);
7346	  if (prev && GET_CODE (prev) == BARRIER)
7347	    {
7348	      free (qty_table + max_reg);
7349	      return insn;
7350	    }
7351
7352	  /* Find the end of the following block.  Note that we won't be
7353	     following branches in this case.  */
7354	  to_usage = 0;
7355	  val.path_size = 0;
7356	  cse_end_of_basic_block (insn, &val, 0, 0, 0);
7357
7358	  /* If the tables we allocated have enough space left
7359	     to handle all the SETs in the next basic block,
7360	     continue through it.  Otherwise, return,
7361	     and that block will be scanned individually.  */
7362	  if (val.nsets * 2 + next_qty > max_qty)
7363	    break;
7364
7365	  cse_basic_block_start = val.low_cuid;
7366	  cse_basic_block_end = val.high_cuid;
7367	  to = val.last;
7368
7369	  /* Prevent TO from being deleted if it is a label.  */
7370	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
7371	    ++LABEL_NUSES (to);
7372
7373	  /* Back up so we process the first insn in the extension.  */
7374	  insn = PREV_INSN (insn);
7375	}
7376    }
7377
7378  if (next_qty > max_qty)
7379    abort ();
7380
7381  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7382     the previous insn is the only insn that branches to the head of a loop,
7383     we can cse into the loop.  Don't do this if we changed the jump
7384     structure of a loop unless we aren't going to be following jumps.  */
7385
7386  insn = prev_nonnote_insn(to);
7387  if ((cse_jumps_altered == 0
7388       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7389      && around_loop && to != 0
7390      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7391      && GET_CODE (insn) == JUMP_INSN
7392      && JUMP_LABEL (insn) != 0
7393      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7394    cse_around_loop (JUMP_LABEL (insn));
7395
7396  free (qty_table + max_reg);
7397
7398  return to ? NEXT_INSN (to) : 0;
7399}
7400
7401/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7402   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7403
7404static int
7405check_for_label_ref (rtl, data)
7406     rtx *rtl;
7407     void *data;
7408{
7409  rtx insn = (rtx) data;
7410
7411  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7412     we must rerun jump since it needs to place the note.  If this is a
7413     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7414     since no REG_LABEL will be added.  */
7415  return (GET_CODE (*rtl) == LABEL_REF
7416	  && ! LABEL_REF_NONLOCAL_P (*rtl)
7417	  && LABEL_P (XEXP (*rtl, 0))
7418	  && INSN_UID (XEXP (*rtl, 0)) != 0
7419	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7420}
7421
7422/* Count the number of times registers are used (not set) in X.
7423   COUNTS is an array in which we accumulate the count, INCR is how much
7424   we count each register usage.
7425
7426   Don't count a usage of DEST, which is the SET_DEST of a SET which
7427   contains X in its SET_SRC.  This is because such a SET does not
7428   modify the liveness of DEST.  */
7429
7430static void
7431count_reg_usage (x, counts, dest, incr)
7432     rtx x;
7433     int *counts;
7434     rtx dest;
7435     int incr;
7436{
7437  enum rtx_code code;
7438  const char *fmt;
7439  int i, j;
7440
7441  if (x == 0)
7442    return;
7443
7444  switch (code = GET_CODE (x))
7445    {
7446    case REG:
7447      if (x != dest)
7448	counts[REGNO (x)] += incr;
7449      return;
7450
7451    case PC:
7452    case CC0:
7453    case CONST:
7454    case CONST_INT:
7455    case CONST_DOUBLE:
7456    case CONST_VECTOR:
7457    case SYMBOL_REF:
7458    case LABEL_REF:
7459      return;
7460
7461    case CLOBBER:
7462      /* If we are clobbering a MEM, mark any registers inside the address
7463         as being used.  */
7464      if (GET_CODE (XEXP (x, 0)) == MEM)
7465	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7466      return;
7467
7468    case SET:
7469      /* Unless we are setting a REG, count everything in SET_DEST.  */
7470      if (GET_CODE (SET_DEST (x)) != REG)
7471	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7472
7473      /* If SRC has side-effects, then we can't delete this insn, so the
7474	 usage of SET_DEST inside SRC counts.
7475
7476	 ??? Strictly-speaking, we might be preserving this insn
7477	 because some other SET has side-effects, but that's hard
7478	 to do and can't happen now.  */
7479      count_reg_usage (SET_SRC (x), counts,
7480		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7481		       incr);
7482      return;
7483
7484    case CALL_INSN:
7485      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7486      /* Fall through.  */
7487
7488    case INSN:
7489    case JUMP_INSN:
7490      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7491
7492      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7493	 use them.  */
7494
7495      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7496      return;
7497
7498    case EXPR_LIST:
7499    case INSN_LIST:
7500      if (REG_NOTE_KIND (x) == REG_EQUAL
7501	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7502	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7503      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7504      return;
7505
7506    default:
7507      break;
7508    }
7509
7510  fmt = GET_RTX_FORMAT (code);
7511  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7512    {
7513      if (fmt[i] == 'e')
7514	count_reg_usage (XEXP (x, i), counts, dest, incr);
7515      else if (fmt[i] == 'E')
7516	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7517	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7518    }
7519}
7520
7521/* Return true if set is live.  */
7522static bool
7523set_live_p (set, insn, counts)
7524     rtx set;
7525     rtx insn ATTRIBUTE_UNUSED;	/* Only used with HAVE_cc0.  */
7526     int *counts;
7527{
7528#ifdef HAVE_cc0
7529  rtx tem;
7530#endif
7531
7532  if (set_noop_p (set))
7533    ;
7534
7535#ifdef HAVE_cc0
7536  else if (GET_CODE (SET_DEST (set)) == CC0
7537	   && !side_effects_p (SET_SRC (set))
7538	   && ((tem = next_nonnote_insn (insn)) == 0
7539	       || !INSN_P (tem)
7540	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7541    return false;
7542#endif
7543  else if (GET_CODE (SET_DEST (set)) != REG
7544	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7545	   || counts[REGNO (SET_DEST (set))] != 0
7546	   || side_effects_p (SET_SRC (set))
7547	   /* An ADDRESSOF expression can turn into a use of the
7548	      internal arg pointer, so always consider the
7549	      internal arg pointer live.  If it is truly dead,
7550	      flow will delete the initializing insn.  */
7551	   || (SET_DEST (set) == current_function_internal_arg_pointer))
7552    return true;
7553  return false;
7554}
7555
7556/* Return true if insn is live.  */
7557
7558static bool
7559insn_live_p (insn, counts)
7560     rtx insn;
7561     int *counts;
7562{
7563  int i;
7564  if (GET_CODE (PATTERN (insn)) == SET)
7565    return set_live_p (PATTERN (insn), insn, counts);
7566  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7567    {
7568      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7569	{
7570	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
7571
7572	  if (GET_CODE (elt) == SET)
7573	    {
7574	      if (set_live_p (elt, insn, counts))
7575		return true;
7576	    }
7577	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7578	    return true;
7579	}
7580      return false;
7581    }
7582  else
7583    return true;
7584}
7585
7586/* Return true if libcall is dead as a whole.  */
7587
7588static bool
7589dead_libcall_p (insn)
7590     rtx insn;
7591{
7592  rtx note;
7593  /* See if there's a REG_EQUAL note on this insn and try to
7594     replace the source with the REG_EQUAL expression.
7595
7596     We assume that insns with REG_RETVALs can only be reg->reg
7597     copies at this point.  */
7598  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7599  if (note)
7600    {
7601      rtx set = single_set (insn);
7602      rtx new = simplify_rtx (XEXP (note, 0));
7603
7604      if (!new)
7605	new = XEXP (note, 0);
7606
7607      if (set && validate_change (insn, &SET_SRC (set), new, 0))
7608	{
7609	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7610	  return true;
7611	}
7612    }
7613  return false;
7614}
7615
7616/* Scan all the insns and delete any that are dead; i.e., they store a register
7617   that is never used or they copy a register to itself.
7618
7619   This is used to remove insns made obviously dead by cse, loop or other
7620   optimizations.  It improves the heuristics in loop since it won't try to
7621   move dead invariants out of loops or make givs for dead quantities.  The
7622   remaining passes of the compilation are also sped up.  */
7623
7624void
7625delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
7626     rtx insns;
7627     int nreg;
7628     int preserve_basic_blocks;
7629{
7630  int *counts;
7631  rtx insn, prev;
7632  int i;
7633  int in_libcall = 0, dead_libcall = 0;
7634  basic_block bb;
7635
7636  /* First count the number of times each register is used.  */
7637  counts = (int *) xcalloc (nreg, sizeof (int));
7638  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7639    count_reg_usage (insn, counts, NULL_RTX, 1);
7640
7641  /* Go from the last insn to the first and delete insns that only set unused
7642     registers or copy a register to itself.  As we delete an insn, remove
7643     usage counts for registers it uses.
7644
7645     The first jump optimization pass may leave a real insn as the last
7646     insn in the function.   We must not skip that insn or we may end
7647     up deleting code that is not really dead.  */
7648  insn = get_last_insn ();
7649  if (! INSN_P (insn))
7650    insn = prev_real_insn (insn);
7651
7652  if (!preserve_basic_blocks)
7653    for (; insn; insn = prev)
7654      {
7655	int live_insn = 0;
7656
7657	prev = prev_real_insn (insn);
7658
7659	/* Don't delete any insns that are part of a libcall block unless
7660	   we can delete the whole libcall block.
7661
7662	   Flow or loop might get confused if we did that.  Remember
7663	   that we are scanning backwards.  */
7664	if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7665	  {
7666	    in_libcall = 1;
7667	    live_insn = 1;
7668	    dead_libcall = dead_libcall_p (insn);
7669	  }
7670	else if (in_libcall)
7671	  live_insn = ! dead_libcall;
7672	else
7673	  live_insn = insn_live_p (insn, counts);
7674
7675	/* If this is a dead insn, delete it and show registers in it aren't
7676	   being used.  */
7677
7678	if (! live_insn)
7679	  {
7680	    count_reg_usage (insn, counts, NULL_RTX, -1);
7681	    delete_related_insns (insn);
7682	  }
7683
7684	if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7685	  {
7686	    in_libcall = 0;
7687	    dead_libcall = 0;
7688	  }
7689      }
7690  else
7691    for (i = 0; i < n_basic_blocks; i++)
7692      for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
7693	{
7694	  int live_insn = 0;
7695
7696	  prev = PREV_INSN (insn);
7697	  if (!INSN_P (insn))
7698	    continue;
7699
7700	  /* Don't delete any insns that are part of a libcall block unless
7701	     we can delete the whole libcall block.
7702
7703	     Flow or loop might get confused if we did that.  Remember
7704	     that we are scanning backwards.  */
7705	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7706	    {
7707	      in_libcall = 1;
7708	      live_insn = 1;
7709	      dead_libcall = dead_libcall_p (insn);
7710	    }
7711	  else if (in_libcall)
7712	    live_insn = ! dead_libcall;
7713	  else
7714	    live_insn = insn_live_p (insn, counts);
7715
7716	  /* If this is a dead insn, delete it and show registers in it aren't
7717	     being used.  */
7718
7719	  if (! live_insn)
7720	    {
7721	      count_reg_usage (insn, counts, NULL_RTX, -1);
7722	      delete_insn (insn);
7723	    }
7724
7725	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7726	    {
7727	      in_libcall = 0;
7728	      dead_libcall = 0;
7729	    }
7730	}
7731
7732  /* Clean up.  */
7733  free (counts);
7734}
7735