combine.c revision 70635
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
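
   For example (illustrative RTL only; the register numbers are made up),
   given the linked pair

	 (set (reg 60) (plus (reg 61) (const_int 4)))
	 (set (reg 62) (mem (reg 60)))

   combining substitutes the PLUS for (reg 60) in the second insn,
   yielding (set (reg 62) (mem (plus (reg 61) (const_int 4)))), which is
   kept only if the target recognizes the resulting address.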

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the insn
   that tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn that
   explicitly uses CC0.
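
   (On a CC0 target a compare-and-branch is such an adjacent pair; in
   illustrative RTL:

	 (set (cc0) (compare (reg 60) (const_int 0)))
	 (set (pc) (if_then_else (eq (cc0) (const_int 0))
				 (label_ref 23) (pc)))

   combine may merge the setter into the jump insn.)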

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
#include "system.h"
#include "rtl.h" /* stdio.h must precede rtl.h for FFS.  */
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */
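
/* (For instance, if I2 were (set (reg 64) (plus (reg 64) (const_int 1))),
   a get_last_value lookup of register 64 made while substituting I2 into
   I3 must not return the incremented value.  Illustrative RTL only.)  */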

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.
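
   (Concretely: if every earlier set of register 65 left only the low
   eight bits possibly nonzero, then (and:SI (reg 65) (const_int 255))
   cannot change the value.  Illustrative register number.)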

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
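
/* (A worked instance of the invalidation rule, with made-up register
   numbers: suppose we record (plus (reg 64) (const_int 1)) as the value
   of register 65, and register 64 is then reassigned under the same
   label_tick.  A later lookup of register 65 may still return that
   expression, but the stale (reg 64) inside it is replaced by
   (clobber (const_int 0)) so that no simplification can match it.)  */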

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.
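
   (In that case every set confines the nonzero bits to the low byte, so
   the register's reg_nonzero_bits entry would be 0xff.)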

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone.  undos chains the changes
   currently recorded; frees chains undo structures available for reuse.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a
   piece from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 0;						\
      _buf->where.r = &INTO;					\
      _buf->old_contents.r = INTO;				\
      INTO = _new;						\
      if (_buf->old_contents.r == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
    } while (0)
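
/* A minimal usage sketch (illustrative only; X and NEW_SRC are made-up
   names): to tentatively replace the source of a SET,

	SUBST (SET_SRC (x), new_src);

   Any number of such calls may be made while rewriting a pattern; if the
   result fails to match, undo_all walks undobuf.undos and restores every
   recorded location.  */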

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substituting
   for a HOST_WIDE_INT value (including CONST_INT) is not safe: the undo
   record stores the old contents as a plain int, which may be narrower.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 1;						\
      _buf->where.i = (int *) &INTO;				\
      _buf->old_contents.i = INTO;				\
      INTO = NEWVAL;						\
      if (_buf->old_contents.i == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
     } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO((void));
static void setup_incoming_promotions   PROTO((void));
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int cant_combine_insn_p	PROTO((rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PROTO((rtx));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PROTO((rtx));
static rtx simplify_set		PROTO((rtx));
static rtx simplify_logical	PROTO((rtx, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx extract_left_shift	PROTO((rtx, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PROTO((rtx, rtx *, rtx *));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				  ...));
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, rtx, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
static int insn_cuid		PROTO((rtx));

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also record any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
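
/* (For instance, if X is set once from (zero_extend:SI (mem:QI ...)) and
   once from (const_int 3), the recorded nonzero bits accumulate to
   0xff | 3 == 0xff, and the recorded sign-bit-copy count is the minimum
   over the two sets.  Illustrative RTL only.)  */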

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */
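      /* (A paradoxical SUBREG is one whose mode is wider than the mode of
	 the register inside it, e.g. (subreg:DI (reg:SI 65) 0).)  */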

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */
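
/* (For instance, with an INSN of (set (reg 65) (plus (reg 64) (const_int 1)))
   that is allowed to combine into I3, *PDEST is set to (reg 65) and *PSRC
   to (plus (reg 64) (const_int 1)).  Illustrative RTL only.)  */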

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USEs of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  int regno = REGNO (XEXP (elt, 0));
		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);
		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in combinable_i3pat except that we don't
	 test if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	    && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

  /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
     was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation. Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		     && ! REG_USERVAR_P (inner_dest)
		     && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			     && i3 != 0
			     && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (insn)
     rtx insn;
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    return 1;

  /* For the 2.95.3 release, restrict this code to only handle the machines
     where it's strictly needed.  */
  if (! SMALL_REGISTER_CLASSES)
    return 0;

  /* Never combine loads and stores involving hard regs.  The register
     allocator can usually handle such reg-reg moves by tying.  If we allow
     the combiner to make substitutions of hard regs, we risk aborting in
     reload on machines that have SMALL_REGISTER_CLASSES.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)])
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)])))
    return 1;

  return 0;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;
1413
1414  /* Exit early if one of the insns involved can't be used for
1415     combinations.  */
1416  if (cant_combine_insn_p (i3)
1417      || cant_combine_insn_p (i2)
1418      || (i1 && cant_combine_insn_p (i1))
1419      /* We also can't do anything if I3 has a
1420	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1421	 libcall.  */
1422#if 0
1423      /* ??? This gives worse code, and appears to be unnecessary, since no
1424	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1425      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1426#endif
1427      )
1428    return 0;
1429
1430  combine_attempts++;
1431
1432  undobuf.undos = undobuf.previous_undos = 0;
1433  undobuf.other_insn = 0;
1434
1435  /* Save the current high-water-mark so we can free storage if we didn't
1436     accept this combination.  */
1437  undobuf.storage = (char *) oballoc (0);
1438
1439  /* Reset the hard register usage information.  */
1440  CLEAR_HARD_REG_SET (newpat_used_regs);
1441
1442  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1443     code below, set I1 to be the earlier of the two insns.  */
1444  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1445    temp = i1, i1 = i2, i2 = temp;
1446
1447  added_links_insn = 0;
1448
1449  /* First check for one important special-case that the code below will
1450     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
1451     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1452     we may be able to replace that destination with the destination of I3.
1453     This occurs in the common code where we compute both a quotient and
1454     remainder into a structure, in which case we want to do the computation
1455     directly into the structure to avoid register-register copies.
1456
1457     We make very conservative checks below and only try to handle the
1458     most common cases of this.  For example, we only handle the case
1459     where I2 and I3 are adjacent to avoid making difficult register
1460     usage tests.  */
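  /* For illustration (register numbers are made up), I2 and I3 might be

	I2: (parallel [(set (reg 65) (div:SI (reg 60) (reg 61)))
		       (set (reg 66) (mod:SI (reg 60) (reg 61)))])
	I3: (set (mem:SI (reg 70)) (reg 66))

     Replacing (reg 66) in I2 with I3's destination gives

	(parallel [(set (reg 65) (div:SI (reg 60) (reg 61)))
		   (set (mem:SI (reg 70)) (mod:SI (reg 60) (reg 61)))])

     which becomes the new pattern for I3, and I2 is turned into a NOTE.  */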
1461
1462  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1463      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1464      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1465      && (! SMALL_REGISTER_CLASSES
1466	  || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1467	      || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1468	      || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
1469      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1470      && GET_CODE (PATTERN (i2)) == PARALLEL
1471      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1472      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1473	 below would need to check what is inside (and reg_overlap_mentioned_p
1474	 doesn't support those codes anyway).  Don't allow those destinations;
1475	 the resulting insn isn't likely to be recognized anyway.  */
1476      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1477      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1478      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1479				    SET_DEST (PATTERN (i3)))
1480      && next_real_insn (i2) == i3)
1481    {
1482      rtx p2 = PATTERN (i2);
1483
1484      /* Make sure that the destination of I3,
1485	 which we are going to substitute into one output of I2,
1486	 is not used within another output of I2.  We must avoid making this:
1487	 (parallel [(set (mem (reg 69)) ...)
1488		    (set (reg 69) ...)])
1489	 which is not well-defined as to order of actions.
1490	 (Besides, reload can't handle output reloads for this.)
1491
1492	 The problem can also happen if the dest of I3 is a memory ref,
1493	 if another dest in I2 is an indirect memory ref.  */
1494      for (i = 0; i < XVECLEN (p2, 0); i++)
1495	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1496	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1497	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1498					SET_DEST (XVECEXP (p2, 0, i))))
1499	  break;
1500
1501      if (i == XVECLEN (p2, 0))
1502	for (i = 0; i < XVECLEN (p2, 0); i++)
1503	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1504	    {
1505	      combine_merges++;
1506
1507	      subst_insn = i3;
1508	      subst_low_cuid = INSN_CUID (i2);
1509
1510	      added_sets_2 = added_sets_1 = 0;
1511	      i2dest = SET_SRC (PATTERN (i3));
1512
1513	      /* Replace the dest in I2 with our dest and make the resulting
1514		 insn the new pattern for I3.  Then skip to where we
1515		 validate the pattern.  Everything was set up above.  */
1516	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1517		     SET_DEST (PATTERN (i3)));
1518
1519	      newpat = p2;
1520	      i3_subst_into_i2 = 1;
1521	      goto validate_replacement;
1522	    }
1523    }
1524
1525#ifndef HAVE_cc0
1526  /* If we have no I1 and I2 looks like:
1527	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1528		   (set Y OP)])
1529     make up a dummy I1 that is
1530	(set Y OP)
1531     and change I2 to be
1532        (set (reg:CC X) (compare:CC Y (const_int 0)))
1533
1534     (We can ignore any trailing CLOBBERs.)
1535
1536     This undoes a previous combination and allows us to match a branch-and-
1537     decrement insn.  */
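  /* Concretely (hypothetical registers), if I2 is

	(parallel [(set (reg:CC 17)
			(compare:CC (plus:SI (reg 60) (const_int -1))
				    (const_int 0)))
		   (set (reg:SI 60) (plus:SI (reg 60) (const_int -1)))])

     we invent I1 = (set (reg:SI 60) (plus:SI (reg 60) (const_int -1)))
     and rewrite I2 as
	(set (reg:CC 17) (compare:CC (reg:SI 60) (const_int 0))).  */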
1538
1539  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1540      && XVECLEN (PATTERN (i2), 0) >= 2
1541      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1542      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1543	  == MODE_CC)
1544      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1545      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1546      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1547      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1548      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1549		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1550    {
1551      for (i =  XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1552	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1553	  break;
1554
1555      if (i == 1)
1556	{
1557	  /* We make I1 with the same INSN_UID as I2.  This gives it
1558	     the same INSN_CUID for value tracking.  Our fake I1 will
1559	     never appear in the insn stream so giving it the same INSN_UID
1560	     as I2 will not cause a problem.  */
1561
1562	  subst_prev_insn = i1
1563	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1564			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1565			    NULL_RTX);
1566
1567	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1568	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1569		 SET_DEST (PATTERN (i1)));
1570	}
1571    }
1572#endif
1573
1574  /* Verify that I2 and I1 are valid for combining.  */
1575  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1576      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1577    {
1578      undo_all ();
1579      return 0;
1580    }
1581
1582  /* Record whether I2DEST is used in I2SRC and similarly for the other
1583     cases.  Knowing this will help in register status updating below.  */
1584  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1585  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1586  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1587
1588  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1589     in I2SRC.  */
1590  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1591
1592  /* Ensure that I3's pattern can be the destination of combines.  */
1593  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1594			  i1 && i2dest_in_i1src && i1_feeds_i3,
1595			  &i3dest_killed))
1596    {
1597      undo_all ();
1598      return 0;
1599    }
1600
1601  /* See if any of the insns is a MULT operation.  Unless one is, we will
1602     reject a combination that is, since it must be slower.  Be conservative
1603     here.  */
1604  if (GET_CODE (i2src) == MULT
1605      || (i1 != 0 && GET_CODE (i1src) == MULT)
1606      || (GET_CODE (PATTERN (i3)) == SET
1607	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1608    have_mult = 1;
1609
1610  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1611     We used to do this EXCEPT in one case: I3 has a post-inc in an
1612     output operand.  However, that exception can give rise to insns like
1613     	mov r3,(r3)+
1614     which is a famous insn on the PDP-11 where the value of r3 used as the
1615     source was model-dependent.  Avoid this sort of thing.  */
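  /* In RTL terms, the hazard is producing something like

	(set (mem:HI (post_inc:SI (reg 3))) (reg 3))

     where it is unspecified whether the old or the incremented value
     of r3 is stored.  */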
1616
1617#if 0
1618  if (!(GET_CODE (PATTERN (i3)) == SET
1619	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1620	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1621	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1622	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1623    /* It's not the exception.  */
1624#endif
1625#ifdef AUTO_INC_DEC
1626    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1627      if (REG_NOTE_KIND (link) == REG_INC
1628	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1629	      || (i1 != 0
1630		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1631	{
1632	  undo_all ();
1633	  return 0;
1634	}
1635#endif
1636
1637  /* See if the SETs in I1 or I2 need to be kept around in the merged
1638     instruction: whenever the value set there is still needed past I3.
1639     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1640
1641     For the SET in I1, we have two cases:  If I1 and I2 independently
1642     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1643     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1644     in I1 needs to be kept around unless I1DEST dies or is set in either
1645     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1646     I1DEST.  If so, we know I1 feeds into I2.  */
1647
1648  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1649
1650  added_sets_1
1651    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1652	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1653
1654  /* If the set in I2 needs to be kept around, we must make a copy of
1655     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1656     PATTERN (I2), we are only substituting for the original I1DEST, not into
1657     an already-substituted copy.  This also prevents making self-referential
1658     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1659     I2DEST.  */
1660
1661  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1662	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1663	   : PATTERN (i2));
1664
1665  if (added_sets_2)
1666    i2pat = copy_rtx (i2pat);
1667
1668  combine_merges++;
1669
1670  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1671
1672  maxreg = max_reg_num ();
1673
1674  subst_insn = i3;
1675
1676  /* It is possible that the source of I2 or I1 may be performing an
1677     unneeded operation, such as a ZERO_EXTEND of something that is known
1678     to have the high part zero.  Handle that case by letting subst look at
1679     the innermost one of them.
1680
1681     Another way to do this would be to have a function that tries to
1682     simplify a single insn instead of merging two or more insns.  We don't
1683     do this because of the potential for infinite loops and because
1684     of the potential extra memory required.  However, doing it the way
1685     we are is a bit of a kludge and doesn't catch all cases.
1686
1687     But only do this if -fexpensive-optimizations since it slows things down
1688     and doesn't usually win.  */
1689
1690  if (flag_expensive_optimizations)
1691    {
1692      /* Pass pc_rtx so no substitutions are done, just simplifications.
1693	 The cases that we are interested in here do not involve the few
1694	 cases where is_replaced is checked.  */
1695      if (i1)
1696	{
1697	  subst_low_cuid = INSN_CUID (i1);
1698	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1699	}
1700      else
1701	{
1702	  subst_low_cuid = INSN_CUID (i2);
1703	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1704	}
1705
1706      undobuf.previous_undos = undobuf.undos;
1707    }
1708
1709#ifndef HAVE_cc0
1710  /* Many machines that don't use CC0 have insns that can both perform an
1711     arithmetic operation and set the condition code.  These operations will
1712     be represented as a PARALLEL with the first element of the vector
1713     being a COMPARE of an arithmetic operation with the constant zero.
1714     The second element of the vector will set some pseudo to the result
1715     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1716     match such a pattern and so will generate an extra insn.  Here we test
1717     for this case, where both the comparison and the operation result are
1718     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1719     I2SRC.  Later we will make the PARALLEL that contains I2.  */
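  /* For example (hypothetical registers), with

	I2: (set (reg 62) (plus:SI (reg 60) (reg 61)))
	I3: (set (reg:CC 17) (compare:CC (reg 62) (const_int 0)))

     and (reg 62) still live after I3, we rewrite I3's source here as
     (compare:CC (plus:SI (reg 60) (reg 61)) (const_int 0)); the set of
     (reg 62) is re-added when the PARALLEL is built below.  */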
1720
1721  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1722      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1723      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1724      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1725    {
1726#ifdef EXTRA_CC_MODES
1727      rtx *cc_use;
1728      enum machine_mode compare_mode;
1729#endif
1730
1731      newpat = PATTERN (i3);
1732      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1733
1734      i2_is_used = 1;
1735
1736#ifdef EXTRA_CC_MODES
1737      /* See if a COMPARE with the operand we substituted in should be done
1738	 with the mode that is currently being used.  If not, do the same
1739	 processing we do in `subst' for a SET; namely, if the destination
1740	 is used only once, try to replace it with a register of the proper
1741	 mode and also replace the COMPARE.  */
1742      if (undobuf.other_insn == 0
1743	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1744					&undobuf.other_insn))
1745	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1746					      i2src, const0_rtx))
1747	      != GET_MODE (SET_DEST (newpat))))
1748	{
1749	  int regno = REGNO (SET_DEST (newpat));
1750	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1751
1752	  if (regno < FIRST_PSEUDO_REGISTER
1753	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1754		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1755	    {
1756	      if (regno >= FIRST_PSEUDO_REGISTER)
1757		SUBST (regno_reg_rtx[regno], new_dest);
1758
1759	      SUBST (SET_DEST (newpat), new_dest);
1760	      SUBST (XEXP (*cc_use, 0), new_dest);
1761	      SUBST (SET_SRC (newpat),
1762		     gen_rtx_combine (COMPARE, compare_mode,
1763				      i2src, const0_rtx));
1764	    }
1765	  else
1766	    undobuf.other_insn = 0;
1767	}
1768#endif
1769    }
1770  else
1771#endif
1772    {
1773      n_occurrences = 0;		/* `subst' counts here */
1774
1775      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1776	 need to make a unique copy of I2SRC each time we substitute it
1777	 to avoid self-referential rtl.  */
1778
1779      subst_low_cuid = INSN_CUID (i2);
1780      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1781		      ! i1_feeds_i3 && i1dest_in_i1src);
1782      undobuf.previous_undos = undobuf.undos;
1783
1784      /* Record whether i2's body now appears within i3's body.  */
1785      i2_is_used = n_occurrences;
1786    }
1787
1788  /* If we already got a failure, don't try to do more.  Otherwise,
1789     try to substitute in I1 if we have it.  */
1790
1791  if (i1 && GET_CODE (newpat) != CLOBBER)
1792    {
1793      /* Before we can do this substitution, we must redo the test done
1794	 above (see detailed comments there) that ensures that I1DEST
1795	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1796
1797      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1798			      0, NULL_PTR))
1799	{
1800	  undo_all ();
1801	  return 0;
1802	}
1803
1804      n_occurrences = 0;
1805      subst_low_cuid = INSN_CUID (i1);
1806      newpat = subst (newpat, i1dest, i1src, 0, 0);
1807      undobuf.previous_undos = undobuf.undos;
1808    }
1809
1810  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
1811     to count all the ways that I2SRC and I1SRC can be used.  */
1812  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1813       && i2_is_used + added_sets_2 > 1)
1814      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1815	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1816	      > 1))
1817      /* Fail if we tried to make a new register (we used to abort, but there's
1818	 really no reason to).  */
1819      || max_reg_num () != maxreg
1820      /* Fail if we couldn't do something and have a CLOBBER.  */
1821      || GET_CODE (newpat) == CLOBBER
1822      /* Fail if this new pattern is a MULT and we didn't have one before
1823	 at the outer level.  */
1824      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1825	  && ! have_mult))
1826    {
1827      undo_all ();
1828      return 0;
1829    }
1830
1831  /* If the actions of the earlier insns must be kept
1832     in addition to substituting them into the latest one,
1833     we must make a new PARALLEL for the latest insn
1834     to hold the additional SETs.  */
1835
1836  if (added_sets_1 || added_sets_2)
1837    {
1838      combine_extras++;
1839
1840      if (GET_CODE (newpat) == PARALLEL)
1841	{
1842	  rtvec old = XVEC (newpat, 0);
1843	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1844	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1845	  bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
1846		 sizeof (old->elem[0]) * old->num_elem);
1847	}
1848      else
1849	{
1850	  rtx old = newpat;
1851	  total_sets = 1 + added_sets_1 + added_sets_2;
1852	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1853	  XVECEXP (newpat, 0, 0) = old;
1854	}
1855
1856     if (added_sets_1)
1857       XVECEXP (newpat, 0, --total_sets)
1858	 = (GET_CODE (PATTERN (i1)) == PARALLEL
1859	    ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1860
1861     if (added_sets_2)
1862	{
1863	  /* If there is no I1, use I2's body as is.  We used to also not do
1864	     the subst call below if I2 was substituted into I3,
1865	     but that could lose a simplification.  */
1866	  if (i1 == 0)
1867	    XVECEXP (newpat, 0, --total_sets) = i2pat;
1868	  else
1869	    /* See comment where i2pat is assigned.  */
1870	    XVECEXP (newpat, 0, --total_sets)
1871	      = subst (i2pat, i1dest, i1src, 0, 0);
1872	}
1873    }
1874
1875  /* We come here when we are replacing a destination in I2 with the
1876     destination of I3.  */
1877 validate_replacement:
1878
1879  /* Note which hard regs this insn has as inputs.  */
1880  mark_used_regs_combine (newpat);
1881
1882  /* Is the result of combination a valid instruction?  */
1883  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1884
1885  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1886     the second SET's destination is a register that is unused.  In that case,
1887     we just need the first SET.   This can occur when simplifying a divmod
1888     insn.  We *must* test for this case here because the code below that
1889     splits two independent SETs doesn't handle this case correctly when it
1890     updates the register status.  Also check the case where the first
1891     SET's destination is unused.  That would not cause incorrect code, but
1892     does cause an unneeded insn to remain.  */
1893
1894  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1895      && XVECLEN (newpat, 0) == 2
1896      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1897      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1898      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1899      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1900      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1901      && asm_noperands (newpat) < 0)
1902    {
1903      newpat = XVECEXP (newpat, 0, 0);
1904      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1905    }
1906
1907  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1908	   && XVECLEN (newpat, 0) == 2
1909	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1910	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1911	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1912	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1913	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1914	   && asm_noperands (newpat) < 0)
1915    {
1916      newpat = XVECEXP (newpat, 0, 1);
1917      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1918    }
1919
1920  /* If we were combining three insns and the result is a simple SET
1921     with no ASM_OPERANDS that wasn't recognized, try to split it into two
1922     insns.  There are two ways to do this.  It can be split using a
1923     machine-specific method (like when you have an addition of a large
1924     constant) or by combine in the function find_split_point.  */
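  /* A sketch of such a split (hypothetical target and registers): if

	(set (reg 62) (plus:SI (reg 60) (const_int 98765)))

     is not recognized because the constant is too large, it may be
     split, using I2DEST as a scratch register, into

	newi2pat: (set (reg 61) (const_int 98765))
	newpat:   (set (reg 62) (plus:SI (reg 60) (reg 61)))  */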
1925
1926  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1927      && asm_noperands (newpat) < 0)
1928    {
1929      rtx m_split, *split;
1930      rtx ni2dest = i2dest;
1931
1932      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
1933	 use I2DEST as a scratch register will help.  In the latter case,
1934	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
1935
1936      m_split = split_insns (newpat, i3);
1937
1938      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1939	 inputs of NEWPAT.  */
1940
1941      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1942	 possible to try that as a scratch reg.  This would require adding
1943	 more code to make it work though.  */
1944
1945      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1946	{
1947	  /* If I2DEST is a hard register or the only use of a pseudo,
1948	     we can change its mode.  */
1949	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1950	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
1951	      && GET_CODE (i2dest) == REG
1952	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1953		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1954		      && ! REG_USERVAR_P (i2dest))))
1955	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
1956			       REGNO (i2dest));
1957
1958	  m_split = split_insns
1959	    (gen_rtx_PARALLEL (VOIDmode,
1960			       gen_rtvec (2, newpat,
1961					  gen_rtx_CLOBBER (VOIDmode,
1962							   ni2dest))),
1963	     i3);
1964	}
1965
1966      if (m_split && GET_CODE (m_split) == SEQUENCE
1967	  && XVECLEN (m_split, 0) == 2
1968	  && (next_real_insn (i2) == i3
1969	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1970				      INSN_CUID (i2))))
1971	{
1972	  rtx i2set, i3set;
1973	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1974	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1975
1976	  i3set = single_set (XVECEXP (m_split, 0, 1));
1977	  i2set = single_set (XVECEXP (m_split, 0, 0));
1978
1979	  /* In case we changed the mode of I2DEST, replace it in the
1980	     pseudo-register table here.  We can't do it above in case this
1981	     code doesn't get executed and we do a split the other way.  */
1982
1983	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1984	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1985
1986	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1987
1988	  /* If I2 or I3 has multiple SETs, we won't know how to track
1989	     register status, so don't use these insns.  If I2's destination
1990	     is used between I2 and I3, we also can't use these insns.  */
1991
1992	  if (i2_code_number >= 0 && i2set && i3set
1993	      && (next_real_insn (i2) == i3
1994		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
1995	    insn_code_number = recog_for_combine (&newi3pat, i3,
1996						  &new_i3_notes);
1997	  if (insn_code_number >= 0)
1998	    newpat = newi3pat;
1999
2000	  /* It is possible that both insns now set the destination of I3.
2001	     If so, we must show an extra use of it.  */
2002
2003	  if (insn_code_number >= 0)
2004	    {
2005	      rtx new_i3_dest = SET_DEST (i3set);
2006	      rtx new_i2_dest = SET_DEST (i2set);
2007
2008	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2009		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2010		     || GET_CODE (new_i3_dest) == SUBREG)
2011		new_i3_dest = XEXP (new_i3_dest, 0);
2012
2013	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2014		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2015		     || GET_CODE (new_i2_dest) == SUBREG)
2016		new_i2_dest = XEXP (new_i2_dest, 0);
2017
2018	      if (GET_CODE (new_i3_dest) == REG
2019		  && GET_CODE (new_i2_dest) == REG
2020		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2021		REG_N_SETS (REGNO (new_i2_dest))++;
2022	    }
2023	}
2024
2025      /* If we can split it and use I2DEST, go ahead and see if that
2026	 helps things be recognized.  Verify that none of the registers
2027	 are set between I2 and I3.  */
2028      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2029#ifdef HAVE_cc0
2030	  && GET_CODE (i2dest) == REG
2031#endif
2032	  /* We need I2DEST in the proper mode.  If it is a hard register
2033	     or the only use of a pseudo, we can change its mode.  */
2034	  && (GET_MODE (*split) == GET_MODE (i2dest)
2035	      || GET_MODE (*split) == VOIDmode
2036	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2037	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2038		  && ! REG_USERVAR_P (i2dest)))
2039	  && (next_real_insn (i2) == i3
2040	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2041	  /* We can't overwrite I2DEST if its value is still used by
2042	     NEWPAT.  */
2043	  && ! reg_referenced_p (i2dest, newpat))
2044	{
2045	  rtx newdest = i2dest;
2046	  enum rtx_code split_code = GET_CODE (*split);
2047	  enum machine_mode split_mode = GET_MODE (*split);
2048
2049	  /* Get NEWDEST as a register in the proper mode.  We have already
2050	     validated that we can do this.  */
2051	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2052	    {
2053	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2054
2055	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2056		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2057	    }
2058
2059	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2060	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2061	     appeared to be a memory address.  This is a kludge.  */
2062	  if (split_code == MULT
2063	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2064	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2065	    {
2066	      SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2067					      XEXP (*split, 0), GEN_INT (i)));
2068	      /* Update split_code because we may not have a multiply
2069		 anymore.  */
2070	      split_code = GET_CODE (*split);
2071	    }
2072
2073#ifdef INSN_SCHEDULING
2074	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2075	     be written as a ZERO_EXTEND.  */
2076	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2077	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
2078					    XEXP (*split, 0)));
2079#endif
2080
2081	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2082	  SUBST (*split, newdest);
2083	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2084
2085	  /* If the split point was a MULT and we didn't have one before,
2086	     don't use one now.  */
2087	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2088	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2089	}
2090    }
2091
2092  /* Check for a case where we loaded from memory in a narrow mode and
2093     then sign extended it, but we need both registers.  In that case,
2094     we have a PARALLEL with both loads from the same memory location.
2095     We can split this into a load from memory followed by a register-register
2096     copy.  This saves at least one insn, more if register allocation can
2097     eliminate the copy.
2098
2099     We cannot do this if the destination of the second assignment is
2100     a register that we have already assumed is zero-extended.  Similarly
2101     for a SUBREG of such a register.  */
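  /* For illustration (hypothetical registers):

	(parallel [(set (reg:SI 60) (sign_extend:SI (mem:HI (reg 70))))
		   (set (reg:HI 61) (mem:HI (reg 70)))])

     becomes

	newi2pat: (set (reg:SI 60) (sign_extend:SI (mem:HI (reg 70))))
	newpat:   (set (reg:HI 61) (lowpart of (reg:SI 60)))

     where the lowpart is typically a (subreg:HI (reg:SI 60) 0).  */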
2102
2103  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2104	   && GET_CODE (newpat) == PARALLEL
2105	   && XVECLEN (newpat, 0) == 2
2106	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2107	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2108	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2109	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2110			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2111	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2112				   INSN_CUID (i2))
2113	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2114	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2115	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2116		 (GET_CODE (temp) == REG
2117		  && reg_nonzero_bits[REGNO (temp)] != 0
2118		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2119		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2120		  && (reg_nonzero_bits[REGNO (temp)]
2121		      != GET_MODE_MASK (word_mode))))
2122	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2123		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2124		     (GET_CODE (temp) == REG
2125		      && reg_nonzero_bits[REGNO (temp)] != 0
2126		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2127		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2128		      && (reg_nonzero_bits[REGNO (temp)]
2129			  != GET_MODE_MASK (word_mode)))))
2130	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2131					 SET_SRC (XVECEXP (newpat, 0, 1)))
2132	   && ! find_reg_note (i3, REG_UNUSED,
2133			       SET_DEST (XVECEXP (newpat, 0, 0))))
2134    {
2135      rtx ni2dest;
2136
2137      newi2pat = XVECEXP (newpat, 0, 0);
2138      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2139      newpat = XVECEXP (newpat, 0, 1);
2140      SUBST (SET_SRC (newpat),
2141	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2142      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2143
2144      if (i2_code_number >= 0)
2145	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2146
2147      if (insn_code_number >= 0)
2148	{
2149	  rtx insn;
2150	  rtx link;
2151
2152	  /* If we will be able to accept this, we have made a change to the
2153	     destination of I3.  This can invalidate a LOG_LINK pointing
2154	     to I3.  No other part of combine.c makes such a transformation.
2155
2156	     The new I3 will have a destination that was previously the
2157	     destination of I1 or I2 and which was used in I2 or I3.  Call
2158	     distribute_links to make a LOG_LINK from the next use of
2159	     that destination.  */
2160
2161	  PATTERN (i3) = newpat;
2162	  distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2163
2164	  /* I3 now uses what used to be its destination and which is
2165	     now I2's destination.  That means we need a LOG_LINK from
2166	     I3 to I2.  But we used to have one, so we still will.
2167
2168	     However, some later insn might be using I2's dest and have
2169	     a LOG_LINK pointing at I3.  We must remove this link.
2170	     The simplest way to remove the link is to point it at I1,
2171	     which we know will be a NOTE.  */
2172
2173	  for (insn = NEXT_INSN (i3);
2174	       insn && (this_basic_block == n_basic_blocks - 1
2175			|| insn != BLOCK_HEAD (this_basic_block + 1));
2176	       insn = NEXT_INSN (insn))
2177	    {
2178	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2179		  && reg_referenced_p (ni2dest, PATTERN (insn)))
2180		{
2181		  for (link = LOG_LINKS (insn); link;
2182		       link = XEXP (link, 1))
2183		    if (XEXP (link, 0) == i3)
2184		      XEXP (link, 0) = i1;
2185
2186		  break;
2187		}
2188	    }
2189	}
2190    }
2191
2192  /* Similarly, check for a case where we have a PARALLEL of two independent
2193     SETs but we started with three insns.  In this case, we can do the sets
2194     as two separate insns.  This case occurs when some SET allows two
2195     other insns to combine, but the destination of that SET is still live.  */
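  /* For example (hypothetical registers),

	(parallel [(set (reg 60) (plus:SI (reg 61) (reg 62)))
		   (set (reg 63) (neg:SI (reg 61)))])

     can be emitted as two insns, since neither SET uses the other's
     destination; absent cc0, the second SET becomes the new I2.  */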
2196
2197  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2198	   && GET_CODE (newpat) == PARALLEL
2199	   && XVECLEN (newpat, 0) == 2
2200	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2201	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2202	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2203	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2204	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2205	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2206	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2207				   INSN_CUID (i2))
2208	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2209	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2210	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2211	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2212				  XVECEXP (newpat, 0, 0))
2213	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2214				  XVECEXP (newpat, 0, 1)))
2215    {
2216      /* Normally, it doesn't matter which of the two is done first,
2217	 but it does if one references cc0.  In that case, it has to
2218	 be first.  */
2219#ifdef HAVE_cc0
2220      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2221	{
2222	  newi2pat = XVECEXP (newpat, 0, 0);
2223	  newpat = XVECEXP (newpat, 0, 1);
2224	}
2225      else
2226#endif
2227	{
2228	  newi2pat = XVECEXP (newpat, 0, 1);
2229	  newpat = XVECEXP (newpat, 0, 0);
2230	}
2231
2232      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2233
2234      if (i2_code_number >= 0)
2235	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2236    }
2237
2238  /* If it still isn't recognized, fail and change things back the way they
2239     were.  */
2240  if ((insn_code_number < 0
2241       /* Is the result a reasonable ASM_OPERANDS?  */
2242       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2243    {
2244      undo_all ();
2245      return 0;
2246    }
2247
2248  /* If we had to change another insn, make sure it is valid also.  */
2249  if (undobuf.other_insn)
2250    {
2251      rtx other_pat = PATTERN (undobuf.other_insn);
2252      rtx new_other_notes;
2253      rtx note, next;
2254
2255      CLEAR_HARD_REG_SET (newpat_used_regs);
2256
2257      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2258					     &new_other_notes);
2259
2260      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2261	{
2262	  undo_all ();
2263	  return 0;
2264	}
2265
2266      PATTERN (undobuf.other_insn) = other_pat;
2267
2268      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2269	 are still valid.  Then add any non-duplicate notes added by
2270	 recog_for_combine.  */
2271      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2272	{
2273	  next = XEXP (note, 1);
2274
2275	  if (REG_NOTE_KIND (note) == REG_UNUSED
2276	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2277	    {
2278	      if (GET_CODE (XEXP (note, 0)) == REG)
2279		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2280
2281	      remove_note (undobuf.other_insn, note);
2282	    }
2283	}
2284
2285      for (note = new_other_notes; note; note = XEXP (note, 1))
2286	if (GET_CODE (XEXP (note, 0)) == REG)
2287	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2288
2289      distribute_notes (new_other_notes, undobuf.other_insn,
2290			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2291    }
2292
2293  /* We now know that we can do this combination.  Merge the insns and
2294     update the status of registers and LOG_LINKS.  */
2295
2296  {
2297    rtx i3notes, i2notes, i1notes = 0;
2298    rtx i3links, i2links, i1links = 0;
2299    rtx midnotes = 0;
2300    register int regno;
2301    /* Compute which registers we expect to eliminate.  newi2pat may be setting
2302       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
2303       same as i3dest, in which case newi2pat may be setting i1dest.  */
2304    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2305		   || i2dest_in_i2src || i2dest_in_i1src
2306		   ? 0 : i2dest);
2307    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2308		   || (newi2pat && reg_set_p (i1dest, newi2pat))
2309		   ? 0 : i1dest);
2310
2311    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2312       clear them.  */
2313    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2314    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2315    if (i1)
2316      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2317
2318    /* Ensure that we do not have something that should not be shared but
2319       occurs multiple times in the new insns.  Check this by first
2320       resetting all the `used' flags and then copying anything that is shared.  */
2321
2322    reset_used_flags (i3notes);
2323    reset_used_flags (i2notes);
2324    reset_used_flags (i1notes);
2325    reset_used_flags (newpat);
2326    reset_used_flags (newi2pat);
2327    if (undobuf.other_insn)
2328      reset_used_flags (PATTERN (undobuf.other_insn));
2329
2330    i3notes = copy_rtx_if_shared (i3notes);
2331    i2notes = copy_rtx_if_shared (i2notes);
2332    i1notes = copy_rtx_if_shared (i1notes);
2333    newpat = copy_rtx_if_shared (newpat);
2334    newi2pat = copy_rtx_if_shared (newi2pat);
2335    if (undobuf.other_insn)
2336      PATTERN (undobuf.other_insn)
	= copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2337
2338    INSN_CODE (i3) = insn_code_number;
2339    PATTERN (i3) = newpat;
2340    if (undobuf.other_insn)
2341      INSN_CODE (undobuf.other_insn) = other_code_number;
2342
2343    /* We had one special case above where I2 had more than one set and
2344       we replaced a destination of one of those sets with the destination
2345       of I3.  In that case, we have to update LOG_LINKS of insns later
2346       in this basic block.  Note that this (expensive) case is rare.
2347
2348       Also, in this case, we must pretend that all REG_NOTEs for I2
2349       actually came from I3, so that REG_UNUSED notes from I2 will be
2350       properly handled.  */
2351
2352    if (i3_subst_into_i2)
2353      {
2354	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2355	  if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2356	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2357	      && ! find_reg_note (i2, REG_UNUSED,
2358				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2359	    for (temp = NEXT_INSN (i2);
2360		 temp && (this_basic_block == n_basic_blocks - 1
2361			  || BLOCK_HEAD (this_basic_block + 1) != temp);
2362		 temp = NEXT_INSN (temp))
2363	      if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2364		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2365		  if (XEXP (link, 0) == i2)
2366		    XEXP (link, 0) = i3;
2367
2368	if (i3notes)
2369	  {
2370	    rtx link = i3notes;
2371	    while (XEXP (link, 1))
2372	      link = XEXP (link, 1);
2373	    XEXP (link, 1) = i2notes;
2374	  }
2375	else
2376	  i3notes = i2notes;
2377	i2notes = 0;
2378      }
2379
2380    LOG_LINKS (i3) = 0;
2381    REG_NOTES (i3) = 0;
2382    LOG_LINKS (i2) = 0;
2383    REG_NOTES (i2) = 0;
2384
2385    if (newi2pat)
2386      {
2387	INSN_CODE (i2) = i2_code_number;
2388	PATTERN (i2) = newi2pat;
2389      }
2390    else
2391      {
2392	PUT_CODE (i2, NOTE);
2393	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2394	NOTE_SOURCE_FILE (i2) = 0;
2395      }
2396
2397    if (i1)
2398      {
2399	LOG_LINKS (i1) = 0;
2400	REG_NOTES (i1) = 0;
2401	PUT_CODE (i1, NOTE);
2402	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2403	NOTE_SOURCE_FILE (i1) = 0;
2404      }
2405
2406    /* Get death notes for everything that is now used in either I3 or
2407       I2 and used to die in a previous insn.  If we built two new
2408       patterns, move from I1 to I2 then I2 to I3 so that we get the
2409       proper movement on registers that I2 modifies.  */
2410
2411    if (newi2pat)
2412      {
2413	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2414	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2415      }
2416    else
2417      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2418		   i3, &midnotes);
2419
2420    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2421    if (i3notes)
2422      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2423			elim_i2, elim_i1);
2424    if (i2notes)
2425      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2426			elim_i2, elim_i1);
2427    if (i1notes)
2428      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2429			elim_i2, elim_i1);
2430    if (midnotes)
2431      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2432			elim_i2, elim_i1);
2433
2434    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2435       know these are REG_UNUSED and want them to go to the desired insn,
2436       so we always pass it as i3.  We have not counted the notes in
2437       reg_n_deaths yet, so we need to do so now.  */
2438
2439    if (newi2pat && new_i2_notes)
2440      {
2441	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2442	  if (GET_CODE (XEXP (temp, 0)) == REG)
2443	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2444
2445	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2446      }
2447
2448    if (new_i3_notes)
2449      {
2450	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2451	  if (GET_CODE (XEXP (temp, 0)) == REG)
2452	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2453
2454	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2455      }
2456
2457    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2458       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2459       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2460       in that case, it might delete I2.  Similarly for I2 and I1.
2461       Show an additional death due to the REG_DEAD note we make here.  If
2462       we discard it in distribute_notes, we will decrement it again.  */
2463
2464    if (i3dest_killed)
2465      {
2466	if (GET_CODE (i3dest_killed) == REG)
2467	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2468
2469	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2470	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2471					       NULL_RTX),
2472			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2473	else
2474	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2475					       NULL_RTX),
2476			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2477			    elim_i2, elim_i1);
2478      }
2479
2480    if (i2dest_in_i2src)
2481      {
2482	if (GET_CODE (i2dest) == REG)
2483	  REG_N_DEATHS (REGNO (i2dest))++;
2484
2485	if (newi2pat && reg_set_p (i2dest, newi2pat))
2486	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2487			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2488	else
2489	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2490			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2491			    NULL_RTX, NULL_RTX);
2492      }
2493
2494    if (i1dest_in_i1src)
2495      {
2496	if (GET_CODE (i1dest) == REG)
2497	  REG_N_DEATHS (REGNO (i1dest))++;
2498
2499	if (newi2pat && reg_set_p (i1dest, newi2pat))
2500	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2501			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2502	else
2503	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2504			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2505			    NULL_RTX, NULL_RTX);
2506      }
2507
2508    distribute_links (i3links);
2509    distribute_links (i2links);
2510    distribute_links (i1links);
2511
2512    if (GET_CODE (i2dest) == REG)
2513      {
2514	rtx link;
2515	rtx i2_insn = 0, i2_val = 0, set;
2516
2517	/* The insn that used to set this register doesn't exist, and
2518	   this life of the register may not exist either.  See if one of
2519	   I3's links points to an insn that sets I2DEST.  If it does,
2520	   that is now the last known value for I2DEST. If we don't update
2521	   this and I2 set the register to a value that depended on its old
2522	   contents, we will get confused.  If this insn is used, things
2523	   will be set correctly in combine_instructions.  */
2524
2525	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2526	  if ((set = single_set (XEXP (link, 0))) != 0
2527	      && rtx_equal_p (i2dest, SET_DEST (set)))
2528	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2529
2530	record_value_for_reg (i2dest, i2_insn, i2_val);
2531
2532	/* If the reg formerly set in I2 died only once and that was in I3,
2533	   zero its use count so it won't make `reload' do any work.  */
2534	if (! added_sets_2
2535	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2536	    && ! i2dest_in_i2src)
2537	  {
2538	    regno = REGNO (i2dest);
2539	    REG_N_SETS (regno)--;
2540	    if (REG_N_SETS (regno) == 0
2541		&& ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
2542				      regno))
2543	      REG_N_REFS (regno) = 0;
2544	  }
2545      }
2546
2547    if (i1 && GET_CODE (i1dest) == REG)
2548      {
2549	rtx link;
2550	rtx i1_insn = 0, i1_val = 0, set;
2551
2552	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2553	  if ((set = single_set (XEXP (link, 0))) != 0
2554	      && rtx_equal_p (i1dest, SET_DEST (set)))
2555	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2556
2557	record_value_for_reg (i1dest, i1_insn, i1_val);
2558
2559	regno = REGNO (i1dest);
2560	if (! added_sets_1 && ! i1dest_in_i1src)
2561	  {
2562	    REG_N_SETS (regno)--;
2563	    if (REG_N_SETS (regno) == 0
2564		&& ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
2565				      regno))
2566	      REG_N_REFS (regno) = 0;
2567	  }
2568      }
2569
2570    /* Update reg_nonzero_bits et al for any changes that may have been made
2571       to this insn.  */
2572
2573    note_stores (newpat, set_nonzero_bits_and_sign_copies);
2574    if (newi2pat)
2575      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2576
2577    /* If I3 is now an unconditional jump, ensure that it has a
2578       BARRIER following it since it may have initially been a
2579       conditional jump.  It may also be the last nonnote insn.  */
2580
2581    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2582	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
2583	    || GET_CODE (temp) != BARRIER))
2584      emit_barrier_after (i3);
2585  }
2586
2587  combine_successes++;
2588
2589  /* Clear this here, so that subsequent get_last_value calls are not
2590     affected.  */
2591  subst_prev_insn = NULL_RTX;
2592
2593  if (added_links_insn
2594      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2595      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2596    return added_links_insn;
2597  else
2598    return newi2pat ? i2 : i3;
2599}
2600
2601/* Undo all the modifications recorded in undobuf.  */
2602
2603static void
2604undo_all ()
2605{
2606  struct undo *undo, *next;
2607
2608  for (undo = undobuf.undos; undo; undo = next)
2609    {
2610      next = undo->next;
2611      if (undo->is_int)
2612	*undo->where.i = undo->old_contents.i;
2613      else
2614	*undo->where.r = undo->old_contents.r;
2615
2616      undo->next = undobuf.frees;
2617      undobuf.frees = undo;
2618    }
2619
2620  obfree (undobuf.storage);
2621  undobuf.undos = undobuf.previous_undos = 0;
2622
2623  /* Clear this here, so that subsequent get_last_value calls are not
2624     affected.  */
2625  subst_prev_insn = NULL_RTX;
2626}
2627
2628/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2629   where we have an arithmetic expression and return that point.  LOC will
2630   be inside INSN.
2631
2632   try_combine will call this function to see if an insn can be split into
2633   two insns.  */
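/* A sketch of the expected use (mirroring try_combine above):

	split = find_split_point (&newpat, i3);
	if (split != 0 && ... safety checks on *split ...)
	  {
	    newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
	    SUBST (*split, newdest);
	  }

   i.e. *SPLIT is computed into a new I2 and then replaced by NEWDEST.  */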
2634
2635static rtx *
2636find_split_point (loc, insn)
2637     rtx *loc;
2638     rtx insn;
2639{
2640  rtx x = *loc;
2641  enum rtx_code code = GET_CODE (x);
2642  rtx *split;
2643  int len = 0, pos, unsignedp;
2644  rtx inner;
2645
2646  /* First special-case some codes.  */
2647  switch (code)
2648    {
2649    case SUBREG:
2650#ifdef INSN_SCHEDULING
2651      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2652	 point.  */
2653      if (GET_CODE (SUBREG_REG (x)) == MEM)
2654	return loc;
2655#endif
2656      return find_split_point (&SUBREG_REG (x), insn);
2657
2658    case MEM:
2659#ifdef HAVE_lo_sum
2660      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2661	 using LO_SUM and HIGH.  */
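      /* E.g. (mem (symbol_ref "x")) becomes

	     (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

	 and the (high ...) subexpression is returned as the split point,
	 to be computed by a separate insn.  */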
2662      if (GET_CODE (XEXP (x, 0)) == CONST
2663	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2664	{
2665	  SUBST (XEXP (x, 0),
2666		 gen_rtx_combine (LO_SUM, Pmode,
2667				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2668				  XEXP (x, 0)));
2669	  return &XEXP (XEXP (x, 0), 0);
2670	}
2671#endif
2672
2673      /* If we have a PLUS whose second operand is a constant and the
2674	 address is not valid, perhaps we can split it up using
2675	 the machine-specific way to split large constants.  We use
2676	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2677	 it will not remain in the result.  */
2678      if (GET_CODE (XEXP (x, 0)) == PLUS
2679	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2680	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2681	{
2682	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2683	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2684				 subst_insn);
2685
2686	  /* This should have produced two insns, each of which sets our
2687	     placeholder.  If the source of the second is a valid address,
2688	     we can put both sources together and make a split point
2689	     in the middle.  */
2690
2691	  if (seq && XVECLEN (seq, 0) == 2
2692	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2693	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2694	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2695	      && ! reg_mentioned_p (reg,
2696				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2697	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2698	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2699	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2700	      && memory_address_p (GET_MODE (x),
2701				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2702	    {
2703	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2704	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2705
2706	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2707		 find where in SRC2 it was placed, that can become our
2708		 split point and we can replace this address with SRC2.
2709		 Just try two obvious places.  */
2710
2711	      src2 = replace_rtx (src2, reg, src1);
2712	      split = 0;
2713	      if (XEXP (src2, 0) == src1)
2714		split = &XEXP (src2, 0);
2715	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2716		       && XEXP (XEXP (src2, 0), 0) == src1)
2717		split = &XEXP (XEXP (src2, 0), 0);
2718
2719	      if (split)
2720		{
2721		  SUBST (XEXP (x, 0), src2);
2722		  return split;
2723		}
2724	    }
2725
2726	  /* If that didn't work, perhaps the first operand is complex and
2727	     needs to be computed separately, so make a split point there.
2728	     This will occur on machines that just support REG + CONST
2729	     and have a constant moved through some previous computation.  */
2730
2731	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2732		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2733			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2734			     == 'o')))
2735	    return &XEXP (XEXP (x, 0), 0);
2736	}
2737      break;
2738
2739    case SET:
2740#ifdef HAVE_cc0
2741      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2742	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2743	 we need to put the operand into a register.  So split at that
2744	 point.  */
2745
2746      if (SET_DEST (x) == cc0_rtx
2747	  && GET_CODE (SET_SRC (x)) != COMPARE
2748	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2749	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2750	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
2751		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2752	return &SET_SRC (x);
2753#endif
2754
2755      /* See if we can split SET_SRC as it stands.  */
2756      split = find_split_point (&SET_SRC (x), insn);
2757      if (split && split != &SET_SRC (x))
2758	return split;
2759
2760      /* See if we can split SET_DEST as it stands.  */
2761      split = find_split_point (&SET_DEST (x), insn);
2762      if (split && split != &SET_DEST (x))
2763	return split;
2764
2765      /* See if this is a bitfield assignment with everything constant.  If
2766	 so, this is an IOR of an AND, so split it into that.  */
2767      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2768	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2769	      <= HOST_BITS_PER_WIDE_INT)
2770	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2771	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2772	  && GET_CODE (SET_SRC (x)) == CONST_INT
2773	  && ((INTVAL (XEXP (SET_DEST (x), 1))
2774	      + INTVAL (XEXP (SET_DEST (x), 2)))
2775	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2776	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2777	{
2778	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
2779	  int len = INTVAL (XEXP (SET_DEST (x), 1));
2780	  int src = INTVAL (SET_SRC (x));
2781	  rtx dest = XEXP (SET_DEST (x), 0);
2782	  enum machine_mode mode = GET_MODE (dest);
2783	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2784
2785	  if (BITS_BIG_ENDIAN)
2786	    pos = GET_MODE_BITSIZE (mode) - len - pos;
2787
2788	  if ((unsigned HOST_WIDE_INT) src == mask)
2789	    SUBST (SET_SRC (x),
2790		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2791	  else
2792	    SUBST (SET_SRC (x),
2793		   gen_binary (IOR, mode,
2794			       gen_binary (AND, mode, dest,
2795					   GEN_INT (~ (mask << pos)
2796						    & GET_MODE_MASK (mode))),
2797			       GEN_INT (src << pos)));
2798
2799	  SUBST (SET_DEST (x), dest);
2800
2801	  split = find_split_point (&SET_SRC (x), insn);
2802	  if (split && split != &SET_SRC (x))
2803	    return split;
2804	}
2805
2806      /* Otherwise, see if this is an operation that we can split into two.
2807	 If so, try to split that.  */
2808      code = GET_CODE (SET_SRC (x));
2809
2810      switch (code)
2811	{
2812	case AND:
2813	  /* If we are AND'ing with a large constant that is only a single
2814	     bit and the result is only being used in a context where we
2815	     need to know if it is zero or non-zero, replace it with a bit
2816	     extraction.  This will avoid the large constant, which might
2817	     have taken more than one insn to make.  If the constant were
2818	     not a valid argument to the AND but took only one insn to make,
2819	     this is no worse, but if it took more than one insn, it will
2820	     be better.  */
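	  /* For example (hypothetical registers),

		(set (reg 60) (and:SI (reg 61) (const_int 32768)))

	     whose result is used only in (ne (reg 60) (const_int 0)) can
	     become a one-bit ZERO_EXTRACT of (reg 61) at bit 15, avoiding
	     the 32768 constant.  */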
2821
2822	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2823	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2824	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2825	      && GET_CODE (SET_DEST (x)) == REG
2826	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2827	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2828	      && XEXP (*split, 0) == SET_DEST (x)
2829	      && XEXP (*split, 1) == const0_rtx)
2830	    {
2831	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2832						XEXP (SET_SRC (x), 0),
2833						pos, NULL_RTX, 1, 1, 0, 0);
2834	      if (extraction != 0)
2835		{
2836		  SUBST (SET_SRC (x), extraction);
2837		  return find_split_point (loc, insn);
2838		}
2839	    }
2840	  break;
2841
2842	case NE:
2843	  /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one bit
2844	     of X can be nonzero, this can be converted into a NEG of a shift.  */
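	  /* For example, if only bit 3 of X can be nonzero,
	     (ne:SI X (const_int 0)) becomes
	     (neg:SI (lshiftrt:SI X (const_int 3))), yielding 0 or -1 to
	     match STORE_FLAG_VALUE.  */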
2845	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2846	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
2847	      && 1 <= (pos = exact_log2
2848		       (nonzero_bits (XEXP (SET_SRC (x), 0),
2849				      GET_MODE (XEXP (SET_SRC (x), 0))))))
2850	    {
2851	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2852
2853	      SUBST (SET_SRC (x),
2854		     gen_rtx_combine (NEG, mode,
2855				      gen_rtx_combine (LSHIFTRT, mode,
2856						       XEXP (SET_SRC (x), 0),
2857						       GEN_INT (pos))));
2858
2859	      split = find_split_point (&SET_SRC (x), insn);
2860	      if (split && split != &SET_SRC (x))
2861		return split;
2862	    }
2863	  break;
2864
2865	case SIGN_EXTEND:
2866	  inner = XEXP (SET_SRC (x), 0);
2867
2868	  /* We can't optimize if either mode is a partial integer
2869	     mode as we don't know how many bits are significant
2870	     in those modes.  */
2871	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2872	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2873	    break;
2874
2875	  pos = 0;
2876	  len = GET_MODE_BITSIZE (GET_MODE (inner));
2877	  unsignedp = 0;
2878	  break;
2879
2880	case SIGN_EXTRACT:
2881	case ZERO_EXTRACT:
2882	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2883	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2884	    {
2885	      inner = XEXP (SET_SRC (x), 0);
2886	      len = INTVAL (XEXP (SET_SRC (x), 1));
2887	      pos = INTVAL (XEXP (SET_SRC (x), 2));
2888
2889	      if (BITS_BIG_ENDIAN)
2890		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2891	      unsignedp = (code == ZERO_EXTRACT);
2892	    }
2893	  break;
2894
2895	default:
2896	  break;
2897	}
2898
2899      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2900	{
2901	  enum machine_mode mode = GET_MODE (SET_SRC (x));
2902
2903	  /* For unsigned, we have a choice of a shift followed by an
2904	     AND or two shifts.  Use two shifts for field sizes where the
2905	     constant might be too large.  We assume here that we can
2906	     always at least get 8-bit constants in an AND insn, which is
2907	     true for every current RISC.  */
2908
2909	  if (unsignedp && len <= 8)
2910	    {
2911	      SUBST (SET_SRC (x),
2912		     gen_rtx_combine
2913		     (AND, mode,
2914		      gen_rtx_combine (LSHIFTRT, mode,
2915				       gen_lowpart_for_combine (mode, inner),
2916				       GEN_INT (pos)),
2917		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2918
2919	      split = find_split_point (&SET_SRC (x), insn);
2920	      if (split && split != &SET_SRC (x))
2921		return split;
2922	    }
2923	  else
2924	    {
2925	      SUBST (SET_SRC (x),
2926		     gen_rtx_combine
2927		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2928		      gen_rtx_combine (ASHIFT, mode,
2929				       gen_lowpart_for_combine (mode, inner),
2930				       GEN_INT (GET_MODE_BITSIZE (mode)
2931						- len - pos)),
2932		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2933
2934	      split = find_split_point (&SET_SRC (x), insn);
2935	      if (split && split != &SET_SRC (x))
2936		return split;
2937	    }
2938	}
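      /* As an illustration (modes hypothetical): an unsigned 4-bit field at
	 bit 8 of (reg:SI 100) becomes (and:SI (lshiftrt:SI (reg:SI 100)
	 (const_int 8)) (const_int 15)), while a signed 20-bit field at bit 8
	 takes the two-shift form (ashiftrt:SI (ashift:SI (reg:SI 100)
	 (const_int 4)) (const_int 12)).  */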
2939
2940      /* See if this is a simple operation with a constant as the second
2941	 operand.  It might be that this constant is out of range and hence
2942	 could be used as a split point.  */
2943      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2944	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2945	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2946	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
2947	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2948	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2949		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2950		      == 'o'))))
2951	return &XEXP (SET_SRC (x), 1);
2952
2953      /* Finally, see if this is a simple operation with its first operand
2954	 not in a register.  The operation might require this operand in a
2955	 register, so return it as a split point.  We can always do this
2956	 because if the first operand were another operation, we would have
2957	 already found it as a split point.  */
2958      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2959	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2960	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2961	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2962	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2963	return &XEXP (SET_SRC (x), 0);
2964
2965      return 0;
2966
2967    case AND:
2968    case IOR:
2969      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2970	 it is better to write this as (not (ior A B)) so we can split it.
2971	 Similarly for IOR.  */
2972      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2973	{
2974	  SUBST (*loc,
2975		 gen_rtx_combine (NOT, GET_MODE (x),
2976				  gen_rtx_combine (code == IOR ? AND : IOR,
2977						   GET_MODE (x),
2978						   XEXP (XEXP (x, 0), 0),
2979						   XEXP (XEXP (x, 1), 0))));
2980	  return find_split_point (loc, insn);
2981	}
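      /* E.g., (and:SI (not:SI (reg:SI 100)) (not:SI (reg:SI 101))) is
	 rewritten as (not:SI (ior:SI (reg:SI 100) (reg:SI 101))), whose
	 inner IOR can then serve as a split point (registers here are
	 hypothetical).  */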
2982
2983      /* Many RISC machines have a large set of logical insns.  If the
2984	 second operand is a NOT, put it first so we will try to split the
2985	 other operand first.  */
2986      if (GET_CODE (XEXP (x, 1)) == NOT)
2987	{
2988	  rtx tem = XEXP (x, 0);
2989	  SUBST (XEXP (x, 0), XEXP (x, 1));
2990	  SUBST (XEXP (x, 1), tem);
2991	}
2992      break;
2993
2994    default:
2995      break;
2996    }
2997
2998  /* Otherwise, select our actions depending on our rtx class.  */
2999  switch (GET_RTX_CLASS (code))
3000    {
3001    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3002    case '3':
3003      split = find_split_point (&XEXP (x, 2), insn);
3004      if (split)
3005	return split;
3006      /* ... fall through ...  */
3007    case '2':
3008    case 'c':
3009    case '<':
3010      split = find_split_point (&XEXP (x, 1), insn);
3011      if (split)
3012	return split;
3013      /* ... fall through ...  */
3014    case '1':
3015      /* Some machines have (and (shift ...) ...) insns.  If X is not
3016	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3017      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3018	return &XEXP (x, 0);
3019
3020      split = find_split_point (&XEXP (x, 0), insn);
3021      if (split)
3022	return split;
3023      return loc;
3024    }
3025
3026  /* Otherwise, we don't have a split point.  */
3027  return 0;
3028}
3029
3030/* Throughout X, replace FROM with TO, and return the result.
3031   The result is TO if X is FROM;
3032   otherwise the result is X, but its contents may have been modified.
3033   If they were modified, a record was made in undobuf so that
3034   undo_all will (among other things) return X to its original state.
3035
3036   If the number of changes necessary is too much to record to undo,
3037   the excess changes are not made, so the result is invalid.
3038   The changes already made can still be undone.
3039   undobuf.num_undo is incremented for such changes, so by testing it
3040   the caller can tell whether the result is valid.
3041
3042   `n_occurrences' is incremented each time FROM is replaced.
3043
3044   IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3045
3046   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
3047   by copying if `n_occurrences' is non-zero.  */
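/* As a sketch (pseudo-register numbers hypothetical): substituting
   FROM = (reg:SI 100) with TO = (plus:SI (reg:SI 101) (const_int 4)) in
   X = (set (reg:SI 102) (mult:SI (reg:SI 100) (reg:SI 103))) yields
   (set (reg:SI 102) (mult:SI (plus:SI (reg:SI 101) (const_int 4))
   (reg:SI 103))), with each change recorded in undobuf.  */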
3048
3049static rtx
3050subst (x, from, to, in_dest, unique_copy)
3051     register rtx x, from, to;
3052     int in_dest;
3053     int unique_copy;
3054{
3055  register enum rtx_code code = GET_CODE (x);
3056  enum machine_mode op0_mode = VOIDmode;
3057  register char *fmt;
3058  register int len, i;
3059  rtx new;
3060
3061/* Two expressions are equal if they are identical copies of a shared
3062   RTX or if they are both registers with the same register number
3063   and mode.  */
3064
3065#define COMBINE_RTX_EQUAL_P(X,Y)			\
3066  ((X) == (Y)						\
3067   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3068       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3069
3070  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3071    {
3072      n_occurrences++;
3073      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3074    }
3075
3076  /* If X and FROM are the same register but different modes, they will
3077     not have been seen as equal above.  However, flow.c will make a
3078     LOG_LINKS entry for that case.  If we do nothing, we will try to
3079     rerecognize our original insn and, when it succeeds, we will
3080     delete the feeding insn, which is incorrect.
3081
3082     So force this insn not to match in this (rare) case.  */
3083  if (! in_dest && code == REG && GET_CODE (from) == REG
3084      && REGNO (x) == REGNO (from))
3085    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3086
3087  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3088     of which may contain things that can be combined.  */
3089  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3090    return x;
3091
3092  /* It is possible to have a subexpression appear twice in the insn.
3093     Suppose that FROM is a register that appears within TO.
3094     Then, after that subexpression has been scanned once by `subst',
3095     the second time it is scanned, TO may be found.  If we were
3096     to scan TO here, we would find FROM within it and create a
3097     self-referential rtl structure, which is completely wrong.  */
3098  if (COMBINE_RTX_EQUAL_P (x, to))
3099    return to;
3100
3101  /* Parallel asm_operands need special attention because all of the
3102     inputs are shared across the arms.  Furthermore, unsharing the
3103     rtl results in recognition failures.  Failure to handle this case
3104     specially can result in circular rtl.
3105
3106     Solve this by doing a normal pass across the first entry of the
3107     parallel, and only processing the SET_DESTs of the subsequent
3108     entries.  Ug.  */
3109
3110  if (code == PARALLEL
3111      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3112      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3113    {
3114      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3115
3116      /* If this substitution failed, this whole thing fails.  */
3117      if (GET_CODE (new) == CLOBBER
3118	  && XEXP (new, 0) == const0_rtx)
3119	return new;
3120
3121      SUBST (XVECEXP (x, 0, 0), new);
3122
3123      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3124	{
3125	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3126
3127	  if (GET_CODE (dest) != REG
3128	      && GET_CODE (dest) != CC0
3129	      && GET_CODE (dest) != PC)
3130	    {
3131	      new = subst (dest, from, to, 0, unique_copy);
3132
3133	      /* If this substitution failed, this whole thing fails.  */
3134	      if (GET_CODE (new) == CLOBBER
3135		  && XEXP (new, 0) == const0_rtx)
3136		return new;
3137
3138	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3139	    }
3140	}
3141    }
3142  else
3143    {
3144      len = GET_RTX_LENGTH (code);
3145      fmt = GET_RTX_FORMAT (code);
3146
3147      /* We don't need to process a SET_DEST that is a register, CC0,
3148	 or PC, so set up to skip this common case.  All other cases
3149	 where we want to suppress replacing something inside a
3150	 SET_SRC are handled via the IN_DEST operand.  */
3151      if (code == SET
3152	  && (GET_CODE (SET_DEST (x)) == REG
3153	      || GET_CODE (SET_DEST (x)) == CC0
3154	      || GET_CODE (SET_DEST (x)) == PC))
3155	fmt = "ie";
3156
3157      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3158	 constant.  */
3159      if (fmt[0] == 'e')
3160	op0_mode = GET_MODE (XEXP (x, 0));
3161
3162      for (i = 0; i < len; i++)
3163	{
3164	  if (fmt[i] == 'E')
3165	    {
3166	      register int j;
3167	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3168		{
3169		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3170		    {
3171		      new = (unique_copy && n_occurrences
3172			     ? copy_rtx (to) : to);
3173		      n_occurrences++;
3174		    }
3175		  else
3176		    {
3177		      new = subst (XVECEXP (x, i, j), from, to, 0,
3178				   unique_copy);
3179
3180		      /* If this substitution failed, this whole thing
3181			 fails.  */
3182		      if (GET_CODE (new) == CLOBBER
3183			  && XEXP (new, 0) == const0_rtx)
3184			return new;
3185		    }
3186
3187		  SUBST (XVECEXP (x, i, j), new);
3188		}
3189	    }
3190	  else if (fmt[i] == 'e')
3191	    {
3192	      if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3193		{
3194		  /* In general, don't install a subreg involving two
3195		     modes not tieable.  It can worsen register
3196		     allocation, and can even make invalid reload
3197		     insns, since the reg inside may need to be copied
3198		     from in the outside mode, and that may be invalid
3199		     if it is an fp reg copied in integer mode.
3200
3201		     We allow two exceptions to this: it is valid if
3202		     it is inside another SUBREG and the mode of that
3203		     SUBREG and the mode of the inside of TO are
3204		     tieable, and it is valid if X is a SET that copies
3205		     FROM to CC0.  */
3206
3207		  if (GET_CODE (to) == SUBREG
3208		      && ! MODES_TIEABLE_P (GET_MODE (to),
3209					    GET_MODE (SUBREG_REG (to)))
3210		      && ! (code == SUBREG
3211			    && MODES_TIEABLE_P (GET_MODE (x),
3212						GET_MODE (SUBREG_REG (to))))
3213#ifdef HAVE_cc0
3214		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3215#endif
3216		      )
3217		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3218
3219		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3220		  n_occurrences++;
3221		}
3222	      else
3223		/* If we are in a SET_DEST, suppress most cases unless we
3224		   have gone inside a MEM, in which case we want to
3225		   simplify the address.  We assume here that things that
3226		   are actually part of the destination have their inner
3227		   parts in the first expression.  This is true for SUBREG,
3228		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3229		   things aside from REG and MEM that should appear in a
3230		   SET_DEST.  */
3231		new = subst (XEXP (x, i), from, to,
3232			     (((in_dest
3233				&& (code == SUBREG || code == STRICT_LOW_PART
3234				    || code == ZERO_EXTRACT))
3235			       || code == SET)
3236			      && i == 0), unique_copy);
3237
3238	      /* If we found that we will have to reject this combination,
3239		 indicate that by returning the CLOBBER ourselves, rather than
3240		 an expression containing it.  This will speed things up as
3241		 well as prevent accidents where two CLOBBERs are considered
3242		 to be equal, thus producing an incorrect simplification.  */
3243
3244	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3245		return new;
3246
3247	      SUBST (XEXP (x, i), new);
3248	    }
3249	}
3250    }
3251
3252  /* Try to simplify X.  If the simplification changed the code, it is likely
3253     that further simplification will help, so loop, but limit the number
3254     of repetitions that will be performed.  */
3255
3256  for (i = 0; i < 4; i++)
3257    {
3258      /* If X is sufficiently simple, don't bother trying to do anything
3259	 with it.  */
3260      if (code != CONST_INT && code != REG && code != CLOBBER)
3261	x = simplify_rtx (x, op0_mode, i == 3, in_dest);
3262
3263      if (GET_CODE (x) == code)
3264	break;
3265
3266      code = GET_CODE (x);
3267
3268      /* We no longer know the original mode of operand 0 since we
3269	 have changed the form of X.  */
3270      op0_mode = VOIDmode;
3271    }
3272
3273  return x;
3274}
3275
3276/* Simplify X, a piece of RTL.  We just operate on the expression at the
3277   outer level; call `subst' to simplify recursively.  Return the new
3278   expression.
3279
3280   OP0_MODE is the original mode of XEXP (x, 0).  LAST is nonzero if this
3281   is the last iteration, so there is no retry even if an expression with
3282   a code different from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3283
3284static rtx
3285simplify_rtx (x, op0_mode, last, in_dest)
3286     rtx x;
3287     enum machine_mode op0_mode;
3288     int last;
3289     int in_dest;
3290{
3291  enum rtx_code code = GET_CODE (x);
3292  enum machine_mode mode = GET_MODE (x);
3293  rtx temp;
3294  int i;
3295
3296  /* If this is a commutative operation, put a constant last and a complex
3297     expression first.  We don't need to do this for comparisons here.  */
3298  if (GET_RTX_CLASS (code) == 'c'
3299      && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3300	  || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3301	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3302	  || (GET_CODE (XEXP (x, 0)) == SUBREG
3303	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3304	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3305    {
3306      temp = XEXP (x, 0);
3307      SUBST (XEXP (x, 0), XEXP (x, 1));
3308      SUBST (XEXP (x, 1), temp);
3309    }
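  /* E.g., (plus:SI (const_int 4) (reg:SI 100)) is canonicalized here to
     (plus:SI (reg:SI 100) (const_int 4)); likewise a complex operand is
     moved ahead of a plain REG (operands hypothetical).  */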
3310
3311  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3312     sign extension of a PLUS with a constant, reverse the order of the sign
3313     extension and the addition.  Note that this is not the same as the original
3314     code, but overflow is undefined for signed values.  Also note that the
3315     PLUS will have been partially moved "inside" the sign-extension, so that
3316     the first operand of X will really look like:
3317         (ashiftrt (plus (ashift A C4) C5) C4).
3318     We convert this to
3319         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3320     and replace the first operand of X with that expression.  Later parts
3321     of this function may simplify the expression further.
3322
3323     For example, if we start with (mult (sign_extend (plus A C1)) C2),
3324     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
3325     distributive law to produce (plus (mult (sign_extend A) C2) C3).
3326
3327     We do this to simplify address expressions.  */
3328
3329  if ((code == PLUS || code == MINUS || code == MULT)
3330      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3331      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3332      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3333      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3334      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3335      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3336      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3337      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3338					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3339					    XEXP (XEXP (x, 0), 1))) != 0)
3340    {
3341      rtx new
3342	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3343				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3344				INTVAL (XEXP (XEXP (x, 0), 1)));
3345
3346      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3347				  INTVAL (XEXP (XEXP (x, 0), 1)));
3348
3349      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3350    }
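  /* Concretely (constants chosen for illustration): with C4 = 16 and
     C5 = 0x30000, (ashiftrt:SI (plus:SI (ashift:SI A (const_int 16))
     (const_int 196608)) (const_int 16)) becomes (plus:SI (ashiftrt:SI
     (ashift:SI A (const_int 16)) (const_int 16)) (const_int 3)),
     since 0x30000 >> 16 = 3.  */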
3351
3352  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3353     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3354     things.  Check for cases where both arms are testing the same
3355     condition.
3356
3357     Don't do anything if all operands are very simple.  */
3358
3359  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3360	|| GET_RTX_CLASS (code) == '<')
3361       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3362	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3363		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3364		      == 'o')))
3365	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3366	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3367		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3368			 == 'o')))))
3369      || (GET_RTX_CLASS (code) == '1'
3370	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3371	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3372		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3373			 == 'o'))))))
3374    {
3375      rtx cond, true, false;
3376
3377      cond = if_then_else_cond (x, &true, &false);
3378      if (cond != 0
3379	  /* If everything is a comparison, what we have is highly unlikely
3380	     to be simpler, so don't use it.  */
3381	  && ! (GET_RTX_CLASS (code) == '<'
3382		&& (GET_RTX_CLASS (GET_CODE (true)) == '<'
3383		    || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3384	{
3385	  rtx cop1 = const0_rtx;
3386	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3387
3388	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3389	    return x;
3390
3391	  /* Simplify the alternative arms; this may collapse the true and
3392	     false arms to store-flag values.  */
3393	  true = subst (true, pc_rtx, pc_rtx, 0, 0);
3394	  false = subst (false, pc_rtx, pc_rtx, 0, 0);
3395
3396	  /* Restarting if we generate a store-flag expression will cause
3397	     us to loop.  Just drop through in this case.  */
3398
3399	  /* If the result values are STORE_FLAG_VALUE and zero, we can
3400	     just make the comparison operation.  */
3401	  if (true == const_true_rtx && false == const0_rtx)
3402	    x = gen_binary (cond_code, mode, cond, cop1);
3403	  else if (true == const0_rtx && false == const_true_rtx)
3404	    x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3405
3406	  /* Likewise, we can make the negate of a comparison operation
3407	     if the result values are - STORE_FLAG_VALUE and zero.  */
3408	  else if (GET_CODE (true) == CONST_INT
3409		   && INTVAL (true) == - STORE_FLAG_VALUE
3410		   && false == const0_rtx)
3411	    x = gen_unary (NEG, mode, mode,
3412			   gen_binary (cond_code, mode, cond, cop1));
3413	  else if (GET_CODE (false) == CONST_INT
3414		   && INTVAL (false) == - STORE_FLAG_VALUE
3415		   && true == const0_rtx)
3416	    x = gen_unary (NEG, mode, mode,
3417			   gen_binary (reverse_condition (cond_code),
3418				       mode, cond, cop1));
3419	  else
3420	    return gen_rtx_IF_THEN_ELSE (mode,
3421					 gen_binary (cond_code, VOIDmode,
3422						     cond, cop1),
3423					 true, false);
3424
3425	  code = GET_CODE (x);
3426	  op0_mode = VOIDmode;
3427	}
3428    }
3429
3430  /* Try to fold this expression in case we have constants that weren't
3431     present before.  */
3432  temp = 0;
3433  switch (GET_RTX_CLASS (code))
3434    {
3435    case '1':
3436      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3437      break;
3438    case '<':
3439      temp = simplify_relational_operation (code, op0_mode,
3440					    XEXP (x, 0), XEXP (x, 1));
3441#ifdef FLOAT_STORE_FLAG_VALUE
3442      if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3443	temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3444		: immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3445#endif
3446      break;
3447    case 'c':
3448    case '2':
3449      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3450      break;
3451    case 'b':
3452    case '3':
3453      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3454					 XEXP (x, 1), XEXP (x, 2));
3455      break;
3456    }
3457
3458  if (temp)
3459    x = temp, code = GET_CODE (temp);
3460
3461  /* First see if we can apply the inverse distributive law.  */
3462  if (code == PLUS || code == MINUS
3463      || code == AND || code == IOR || code == XOR)
3464    {
3465      x = apply_distributive_law (x);
3466      code = GET_CODE (x);
3467    }
3468
3469  /* If CODE is an associative operation not otherwise handled, see if we
3470     can associate some operands.  This can win if they are constants or
3471     if they are logically related (e.g. (a & b) & a).  */
3472  if ((code == PLUS || code == MINUS
3473       || code == MULT || code == AND || code == IOR || code == XOR
3474       || code == DIV || code == UDIV
3475       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3476      && INTEGRAL_MODE_P (mode))
3477    {
3478      if (GET_CODE (XEXP (x, 0)) == code)
3479	{
3480	  rtx other = XEXP (XEXP (x, 0), 0);
3481	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3482	  rtx inner_op1 = XEXP (x, 1);
3483	  rtx inner;
3484
3485	  /* Make sure we pass the constant operand if any as the second
3486	     one if this is a commutative operation.  */
3487	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3488	    {
3489	      rtx tem = inner_op0;
3490	      inner_op0 = inner_op1;
3491	      inner_op1 = tem;
3492	    }
3493	  inner = simplify_binary_operation (code == MINUS ? PLUS
3494					     : code == DIV ? MULT
3495					     : code == UDIV ? MULT
3496					     : code,
3497					     mode, inner_op0, inner_op1);
3498
3499	  /* For commutative operations, try the other pair if that one
3500	     didn't simplify.  */
3501	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3502	    {
3503	      other = XEXP (XEXP (x, 0), 1);
3504	      inner = simplify_binary_operation (code, mode,
3505						 XEXP (XEXP (x, 0), 0),
3506						 XEXP (x, 1));
3507	    }
3508
3509	  if (inner)
3510	    return gen_binary (code, mode, other, inner);
3511	}
3512    }
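  /* Two simple instances (operands hypothetical):
     (plus:SI (plus:SI A (const_int 3)) (const_int 5)) reassociates to
     (plus:SI A (const_int 8)), and (and:SI (and:SI A B) A) collapses to
     (and:SI B A) because the inner (and A A) folds to A.  */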
3513
3514  /* A little bit of algebraic simplification here.  */
3515  switch (code)
3516    {
3517    case MEM:
3518      /* Ensure that our address has any ASHIFTs converted to MULT in case
3519	 address-recognizing predicates are called later.  */
3520      temp = make_compound_operation (XEXP (x, 0), MEM);
3521      SUBST (XEXP (x, 0), temp);
3522      break;
3523
3524    case SUBREG:
3525      /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3526	 is paradoxical.  If we can't do that safely, then it becomes
3527	 something nonsensical so that this combination won't take place.  */
3528
3529      if (GET_CODE (SUBREG_REG (x)) == MEM
3530	  && (GET_MODE_SIZE (mode)
3531	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3532	{
3533	  rtx inner = SUBREG_REG (x);
3534	  int endian_offset = 0;
3535	  /* Don't change the mode of the MEM
3536	     if that would change the meaning of the address.  */
3537	  if (MEM_VOLATILE_P (SUBREG_REG (x))
3538	      || mode_dependent_address_p (XEXP (inner, 0)))
3539	    return gen_rtx_CLOBBER (mode, const0_rtx);
3540
3541	  if (BYTES_BIG_ENDIAN)
3542	    {
3543	      if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3544		endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3545	      if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3546		endian_offset -= (UNITS_PER_WORD
3547				  - GET_MODE_SIZE (GET_MODE (inner)));
3548	    }
3549	  /* Note if the plus_constant doesn't make a valid address
3550	     then this combination won't be accepted.  */
3551	  x = gen_rtx_MEM (mode,
3552			   plus_constant (XEXP (inner, 0),
3553					  (SUBREG_WORD (x) * UNITS_PER_WORD
3554					   + endian_offset)));
3555	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3556	  MEM_COPY_ATTRIBUTES (x, inner);
3557	  return x;
3558	}
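      /* E.g., on a hypothetical little-endian target,
	 (subreg:SI (mem:DI (reg:SI 100)) 0) becomes
	 (mem:SI (reg:SI 100)); with SUBREG_WORD 1 the address would
	 instead be offset by UNITS_PER_WORD.  */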
3559
3560      /* If we are in a SET_DEST, these other cases can't apply.  */
3561      if (in_dest)
3562	return x;
3563
3564      /* Changing mode twice with SUBREG => just change it once,
3565	 or not at all if changing back to starting mode.  */
3566      if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3567	{
3568	  if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3569	      && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3570	    return SUBREG_REG (SUBREG_REG (x));
3571
3572	  SUBST_INT (SUBREG_WORD (x),
3573		     SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3574	  SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3575	}
3576
3577      /* SUBREG of a hard register => just change the register number
3578	 and/or mode.  If the hard register is not valid in that mode,
3579	 suppress this combination.  If the hard register is the stack,
3580	 frame, or argument pointer, leave this as a SUBREG.  */
3581
3582      if (GET_CODE (SUBREG_REG (x)) == REG
3583	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3584	  && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3585#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3586	  && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3587#endif
3588#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3589	  && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3590#endif
3591	  && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3592	{
3593	  if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3594				  mode))
3595	    return gen_rtx_REG (mode,
3596				REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3597	  else
3598	    return gen_rtx_CLOBBER (mode, const0_rtx);
3599	}
3600
3601      /* For a constant, try to pick up the part we want.  Handle a full
3602	 word and low-order part.  Only do this if we are narrowing
3603	 the constant; if it is being widened, we have no idea what
3604	 the extra bits will have been set to.  */
3605
3606      if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3607	  && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3608	  && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3609	  && GET_MODE_CLASS (mode) == MODE_INT)
3610	{
3611	  temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3612				  0, op0_mode);
3613	  if (temp)
3614	    return temp;
3615	}
3616
3617      /* If we want a subreg of a constant, at offset 0,
3618	 take the low bits.  On a little-endian machine, that's
3619	 always valid.  On a big-endian machine, it's valid
3620	 only if the constant's mode fits in one word.  Note that we
3621	 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode.  */
3622      if (CONSTANT_P (SUBREG_REG (x))
3623	  && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3624	      || ! WORDS_BIG_ENDIAN)
3625	      ? SUBREG_WORD (x) == 0
3626	      : (SUBREG_WORD (x)
3627		 == ((GET_MODE_SIZE (op0_mode)
3628		      - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3629		     / UNITS_PER_WORD)))
3630	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3631	  && (! WORDS_BIG_ENDIAN
3632	      || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3633	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3634
3635      /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3636	 since we are saying that the high bits don't matter.  */
3637      if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3638	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3639	return SUBREG_REG (x);
3640
3641      /* Note that we cannot do any narrowing for non-constants since
3642	 we might have been counting on using the fact that some bits were
3643	 zero.  We now do this in the SET.  */
3644
3645      break;
3646
3647    case NOT:
3648      /* (not (plus X -1)) can become (neg X).  */
3649      if (GET_CODE (XEXP (x, 0)) == PLUS
3650	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3651	return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3652
3653      /* Similarly, (not (neg X)) is (plus X -1).  */
3654      if (GET_CODE (XEXP (x, 0)) == NEG)
3655	return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3656				constm1_rtx);
3657
3658      /* (not (xor X C)) for C constant is (xor X D) with D = ~ C.  */
3659      if (GET_CODE (XEXP (x, 0)) == XOR
3660	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3661	  && (temp = simplify_unary_operation (NOT, mode,
3662					       XEXP (XEXP (x, 0), 1),
3663					       mode)) != 0)
3664	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3665
3666      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3667	 other than 1, but that is not valid.  We could do a similar
3668	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3669	 but this doesn't seem common enough to bother with.  */
3670      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3671	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3672	return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3673			       XEXP (XEXP (x, 0), 1));
3674
3675      if (GET_CODE (XEXP (x, 0)) == SUBREG
3676	  && subreg_lowpart_p (XEXP (x, 0))
3677	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3678	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3679	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3680	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3681	{
3682	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3683
3684	  x = gen_rtx_ROTATE (inner_mode,
3685			      gen_unary (NOT, inner_mode, inner_mode,
3686					 const1_rtx),
3687			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3688	  return gen_lowpart_for_combine (mode, x);
3689	}
3690
3691      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3692	 reversing the comparison code if valid.  */
3693      if (STORE_FLAG_VALUE == -1
3694	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3695	  && reversible_comparison_p (XEXP (x, 0)))
3696	return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3697				mode, XEXP (XEXP (x, 0), 0),
3698				XEXP (XEXP (x, 0), 1));
3699
3700      /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3701	 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3702	 perform the above simplification.  */
3703
3704      if (STORE_FLAG_VALUE == -1
3706	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3707	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3708	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3709	return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3710
3711      /* Apply De Morgan's laws to reduce the number of patterns for machines
3712 	 with negating logical insns (and-not, nand, etc.).  If result has
3713 	 only one NOT, put it first, since that is how the patterns are
3714 	 coded.  */
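      /* E.g., (not:SI (ior:SI A (not:SI B))) becomes
	 (and:SI (not:SI A) B), with the lone NOT placed first
	 (A and B stand for hypothetical registers).  */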
3715
3716      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3717 	{
3718 	 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3719
3720	 if (GET_CODE (in1) == NOT)
3721	   in1 = XEXP (in1, 0);
3722 	 else
3723	   in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3724
3725	 if (GET_CODE (in2) == NOT)
3726	   in2 = XEXP (in2, 0);
3727 	 else if (GET_CODE (in2) == CONST_INT
3728		  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3729	   in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3730	 else
3731	   in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3732
3733	 if (GET_CODE (in2) == NOT)
3734	   {
3735	     rtx tem = in2;
3736	     in2 = in1; in1 = tem;
3737	   }
3738
3739	 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3740				 mode, in1, in2);
3741       }
3742      break;
3743
3744    case NEG:
3745      /* (neg (plus X 1)) can become (not X).  */
3746      if (GET_CODE (XEXP (x, 0)) == PLUS
3747	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
3748	return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3749
3750      /* Similarly, (neg (not X)) is (plus X 1).  */
3751      if (GET_CODE (XEXP (x, 0)) == NOT)
3752	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3753
3754      /* (neg (minus X Y)) can become (minus Y X).  */
3755      if (GET_CODE (XEXP (x, 0)) == MINUS
3756	  && (! FLOAT_MODE_P (mode)
3757	      /* x-y != -(y-x) with IEEE floating point.  */
3758	      || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3759	      || flag_fast_math))
3760	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3761			   XEXP (XEXP (x, 0), 0));
3762
3763      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
3764      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3765	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3766	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3767
3768      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
3769	 if we can then eliminate the NEG (e.g.,
3770	 if the operand is a constant).  */
3771
3772      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3773	{
3774	  temp = simplify_unary_operation (NEG, mode,
3775					   XEXP (XEXP (x, 0), 0), mode);
3776	  if (temp)
3777	    {
3778	      SUBST (XEXP (XEXP (x, 0), 0), temp);
3779	      return XEXP (x, 0);
3780	    }
3781	}
3782
3783      temp = expand_compound_operation (XEXP (x, 0));
3784
3785      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3786 	 replaced by (lshiftrt X C).  This will convert
3787	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
3788
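      /* In SImode, for instance, (neg:SI (ashiftrt:SI A (const_int 31)))
	 becomes (lshiftrt:SI A (const_int 31)); both are 0 or 1 according
	 to the sign of A (mode chosen for illustration).  */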
3789      if (GET_CODE (temp) == ASHIFTRT
3790	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
3791	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3792	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3793				     INTVAL (XEXP (temp, 1)));
3794
3795      /* If X has only a single bit that might be nonzero, say, bit I, convert
3796	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3797	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
3798	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
3799	 or a SUBREG of one since we'd be making the expression more
3800	 complex if it was just a register.  */
3801
3802      if (GET_CODE (temp) != REG
3803	  && ! (GET_CODE (temp) == SUBREG
3804		&& GET_CODE (SUBREG_REG (temp)) == REG)
3805	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3806	{
3807	  rtx temp1 = simplify_shift_const
3808	    (NULL_RTX, ASHIFTRT, mode,
3809	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3810				   GET_MODE_BITSIZE (mode) - 1 - i),
3811	     GET_MODE_BITSIZE (mode) - 1 - i);
3812
3813	  /* If all we did was surround TEMP with the two shifts, we
3814	     haven't improved anything, so don't use it.  Otherwise,
3815	     we are better off with TEMP1.  */
3816	  if (GET_CODE (temp1) != ASHIFTRT
3817	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3818	      || XEXP (XEXP (temp1, 0), 0) != temp)
3819	    return temp1;
3820	}
3821      break;
3822
3823    case TRUNCATE:
3824      /* We can't handle truncation to a partial integer mode here
3825	 because we don't know the real bitsize of the partial
3826	 integer mode.  */
3827      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3828	break;
3829
3830      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3831	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3832				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3833	SUBST (XEXP (x, 0),
3834	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3835			      GET_MODE_MASK (mode), NULL_RTX, 0));
3836
3837      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
3838      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3839	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3840	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3841	return XEXP (XEXP (x, 0), 0);
3842
3843      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3844	 (OP:SI foo:SI) if OP is NEG or ABS.  */
3845      if ((GET_CODE (XEXP (x, 0)) == ABS
3846	   || GET_CODE (XEXP (x, 0)) == NEG)
3847	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3848	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3849	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3850	return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3851			  XEXP (XEXP (XEXP (x, 0), 0), 0));
3852
3853      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3854	 (truncate:SI x).  */
3855      if (GET_CODE (XEXP (x, 0)) == SUBREG
3856	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3857	  && subreg_lowpart_p (XEXP (x, 0)))
3858	return SUBREG_REG (XEXP (x, 0));
3859
3860      /* If we know that the value is already truncated, we can
3861         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION is
3862	 nonzero for the corresponding modes.  */
3863      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3864				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3865	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3866	     >= GET_MODE_BITSIZE (mode) + 1)
3867	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3868
3869      /* A truncate of a comparison can be replaced with a subreg if
3870         STORE_FLAG_VALUE permits.  This is like the previous test,
3871         but it works even if the comparison is done in a mode larger
3872         than HOST_BITS_PER_WIDE_INT.  */
3873      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3874	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3875	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3876	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3877
3878      /* Similarly, a truncate of a register whose value is a
3879         comparison can be replaced with a subreg if STORE_FLAG_VALUE
3880         permits.  */
3881      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3882	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3883	  && (temp = get_last_value (XEXP (x, 0)))
3884	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3885	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3886
3887      break;
3888
3889    case FLOAT_TRUNCATE:
3890      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
3891      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3892	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3893 	return XEXP (XEXP (x, 0), 0);
3894
3895      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3896	 (OP:SF foo:SF) if OP is NEG or ABS.  */
3897      if ((GET_CODE (XEXP (x, 0)) == ABS
3898	   || GET_CODE (XEXP (x, 0)) == NEG)
3899	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3900	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3901	return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3902			  XEXP (XEXP (XEXP (x, 0), 0), 0));
3903
3904      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3905	 is (float_truncate:SF x).  */
3906      if (GET_CODE (XEXP (x, 0)) == SUBREG
3907	  && subreg_lowpart_p (XEXP (x, 0))
3908	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3909	return SUBREG_REG (XEXP (x, 0));
3910      break;
3911
3912#ifdef HAVE_cc0
3913    case COMPARE:
3914      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3915	 using cc0, in which case we want to leave it as a COMPARE
3916	 so we can distinguish it from a register-register-copy.  */
3917      if (XEXP (x, 1) == const0_rtx)
3918	return XEXP (x, 0);
3919
3920      /* In IEEE floating point, x-0 is not the same as x.  */
3921      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3922	   || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3923	   || flag_fast_math)
3924	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3925	return XEXP (x, 0);
3926      break;
3927#endif
3928
3929    case CONST:
3930      /* (const (const X)) can become (const X).  Do it this way rather than
3931	 returning the inner CONST since CONST can be shared with a
3932	 REG_EQUAL note.  */
3933      if (GET_CODE (XEXP (x, 0)) == CONST)
3934	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3935      break;
3936
3937#ifdef HAVE_lo_sum
3938    case LO_SUM:
3939      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
3940	 can add in an offset.  find_split_point will split this address up
3941	 again if it doesn't match.  */
3942      if (GET_CODE (XEXP (x, 0)) == HIGH
3943	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3944	return XEXP (x, 1);
3945      break;
3946#endif
3947
3948    case PLUS:
3949      /* If we have (plus (plus A const) B), associate it so that CONST is
3950	 outermost.  That's because that's the way indexed addresses are
3951	 supposed to appear.  This code used to check many more cases, but
3952	 they are now checked elsewhere.  */
3953      if (GET_CODE (XEXP (x, 0)) == PLUS
3954	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3955	return gen_binary (PLUS, mode,
3956			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3957				       XEXP (x, 1)),
3958			   XEXP (XEXP (x, 0), 1));
3959
3960      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3961	 where c is (const_int pow2 / 2), is the sign extension of a
3962	 bit-field and can be replaced by either a sign_extend or a
3963	 sign_extract.  The `and' may be a zero_extend.  */
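      /* Numeric sketch (SImode, 32 bits assumed): with pow2 = 8,
	 (plus:SI (xor:SI (and:SI A (const_int 7)) (const_int 4))
	 (const_int -4)) sign-extends the low 3-bit field of A and is
	 rewritten as (ashiftrt:SI (ashift:SI A (const_int 29))
	 (const_int 29)).  */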
3964      if (GET_CODE (XEXP (x, 0)) == XOR
3965	  && GET_CODE (XEXP (x, 1)) == CONST_INT
3966	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3967	  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3968	  && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3969	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3970	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3971	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3972	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3973		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3974	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3975		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3976		      == i + 1))))
3977	return simplify_shift_const
3978	  (NULL_RTX, ASHIFTRT, mode,
3979	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
3980				 XEXP (XEXP (XEXP (x, 0), 0), 0),
3981				 GET_MODE_BITSIZE (mode) - (i + 1)),
3982	   GET_MODE_BITSIZE (mode) - (i + 1));
3983
3984      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3985	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3986	 is 1.  This produces better code than the alternative immediately
3987	 below.  */
3988      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3989	  && reversible_comparison_p (XEXP (x, 0))
3990	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3991	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3992	return
3993	  gen_unary (NEG, mode, mode,
3994		     gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3995				 mode, XEXP (XEXP (x, 0), 0),
3996				 XEXP (XEXP (x, 0), 1)));
3997
3998      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3999	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4000	 the bitsize of the mode - 1.  This allows simplification of
4001	 "a = (b & 8) == 0;"  */
4002      if (XEXP (x, 1) == constm1_rtx
4003	  && GET_CODE (XEXP (x, 0)) != REG
4004	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4005		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4006	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4007	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4008	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4009				 gen_rtx_combine (XOR, mode,
4010						  XEXP (x, 0), const1_rtx),
4011				 GET_MODE_BITSIZE (mode) - 1),
4012	   GET_MODE_BITSIZE (mode) - 1);
4013
4014      /* If we are adding two things that have no bits in common, convert
4015	 the addition into an IOR.  This will often be further simplified,
4016	 for example in cases like ((a & 1) + (a & 2)), which can
4017	 become a & 3.  */
4018
4019      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4020	  && (nonzero_bits (XEXP (x, 0), mode)
4021	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4022	return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4023      break;
4024
4025    case MINUS:
4026      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4027	 by reversing the comparison code if valid.  */
4028      if (STORE_FLAG_VALUE == 1
4029	  && XEXP (x, 0) == const1_rtx
4030	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4031	  && reversible_comparison_p (XEXP (x, 1)))
4032	return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
4033			   mode, XEXP (XEXP (x, 1), 0),
4034				XEXP (XEXP (x, 1), 1));
4035
4036      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4037	 (and <foo> (const_int pow2-1))  */
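      /* E.g., (minus:SI A (and:SI A (const_int -8))) becomes
	 (and:SI A (const_int 7)); subtracting off the bits above the low
	 three leaves exactly A mod 8 (mode illustrative).  */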
4038      if (GET_CODE (XEXP (x, 1)) == AND
4039	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4040	  && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4041	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4042	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4043				       - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4044
4045      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4046	 integers.  */
4047      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4048	return gen_binary (MINUS, mode,
4049			   gen_binary (MINUS, mode, XEXP (x, 0),
4050				       XEXP (XEXP (x, 1), 0)),
4051			   XEXP (XEXP (x, 1), 1));
4052      break;
4053
4054    case MULT:
4055      /* If we have (mult (plus A B) C), apply the distributive law and then
4056	 the inverse distributive law to see if things simplify.  This
4057	 occurs mostly in addresses, often when unrolling loops.  */
4058
4059      if (GET_CODE (XEXP (x, 0)) == PLUS)
4060	{
4061	  x = apply_distributive_law
4062	    (gen_binary (PLUS, mode,
4063			 gen_binary (MULT, mode,
4064				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4065			 gen_binary (MULT, mode,
4066				     XEXP (XEXP (x, 0), 1),
4067				     copy_rtx (XEXP (x, 1)))));
4068
4069	  if (GET_CODE (x) != MULT)
4070	    return x;
4071	}
4072      break;
4073
4074    case UDIV:
4075      /* If this is a divide by a power of two, treat it as a shift if
4076	 its first operand is a shift.  */
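      /* E.g., (udiv:SI (lshiftrt:SI A (const_int 2)) (const_int 4))
	 becomes (lshiftrt:SI A (const_int 4)) once the two logical right
	 shifts are merged (operands hypothetical).  */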
4077      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4078	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4079	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4080	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4081	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4082	      || GET_CODE (XEXP (x, 0)) == ROTATE
4083	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4084	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4085      break;
4086
4087    case EQ:  case NE:
4088    case GT:  case GTU:  case GE:  case GEU:
4089    case LT:  case LTU:  case LE:  case LEU:
4090      /* If the first operand is a condition code, we can't do anything
4091	 with it.  */
4092      if (GET_CODE (XEXP (x, 0)) == COMPARE
4093	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4094#ifdef HAVE_cc0
4095	      && XEXP (x, 0) != cc0_rtx
4096#endif
4097	       ))
4098	{
4099	  rtx op0 = XEXP (x, 0);
4100	  rtx op1 = XEXP (x, 1);
4101	  enum rtx_code new_code;
4102
4103	  if (GET_CODE (op0) == COMPARE)
4104	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4105
4106	  /* Simplify our comparison, if possible.  */
4107	  new_code = simplify_comparison (code, &op0, &op1);
4108
4109	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4110	     if only the low-order bit is possibly nonzero in X (such as when
4111	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4112	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4113	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4114	     (plus X 1).
4115
4116	     Remove any ZERO_EXTRACT we made when thinking this was a
4117	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4118	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4119	     the call to make_compound_operation in the SET case.  */
4120
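	  /* Illustration, assuming STORE_FLAG_VALUE == 1 and hypothetical
	     operands: (ne:SI (zero_extract:SI A (const_int 1) (const_int 0))
	     (const_int 0)) has only its low bit possibly nonzero, so it is
	     replaced by the expanded extraction itself, which works out to
	     a pair of shifts or an (and:SI A (const_int 1)).  */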
4121	  if (STORE_FLAG_VALUE == 1
4122	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4123	      && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
4124	    return gen_lowpart_for_combine (mode,
4125					    expand_compound_operation (op0));
4126
4127	  else if (STORE_FLAG_VALUE == 1
4128		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4129		   && op1 == const0_rtx
4130		   && (num_sign_bit_copies (op0, mode)
4131		       == GET_MODE_BITSIZE (mode)))
4132	    {
4133	      op0 = expand_compound_operation (op0);
4134	      return gen_unary (NEG, mode, mode,
4135				gen_lowpart_for_combine (mode, op0));
4136	    }
4137
4138	  else if (STORE_FLAG_VALUE == 1
4139		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4140		   && op1 == const0_rtx
4141		   && nonzero_bits (op0, mode) == 1)
4142	    {
4143	      op0 = expand_compound_operation (op0);
4144	      return gen_binary (XOR, mode,
4145				 gen_lowpart_for_combine (mode, op0),
4146				 const1_rtx);
4147	    }
4148
4149	  else if (STORE_FLAG_VALUE == 1
4150		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4151		   && op1 == const0_rtx
4152		   && (num_sign_bit_copies (op0, mode)
4153		       == GET_MODE_BITSIZE (mode)))
4154	    {
4155	      op0 = expand_compound_operation (op0);
4156	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4157	    }
4158
4159	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4160	     those above.  */
4161	  if (STORE_FLAG_VALUE == -1
4162	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4163	      && op1 == const0_rtx
4164	      && (num_sign_bit_copies (op0, mode)
4165		  == GET_MODE_BITSIZE (mode)))
4166	    return gen_lowpart_for_combine (mode,
4167					    expand_compound_operation (op0));
4168
4169	  else if (STORE_FLAG_VALUE == -1
4170		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4171		   && op1 == const0_rtx
4172		   && nonzero_bits (op0, mode) == 1)
4173	    {
4174	      op0 = expand_compound_operation (op0);
4175	      return gen_unary (NEG, mode, mode,
4176				gen_lowpart_for_combine (mode, op0));
4177	    }
4178
4179	  else if (STORE_FLAG_VALUE == -1
4180		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4181		   && op1 == const0_rtx
4182		   && (num_sign_bit_copies (op0, mode)
4183		       == GET_MODE_BITSIZE (mode)))
4184	    {
4185	      op0 = expand_compound_operation (op0);
4186	      return gen_unary (NOT, mode, mode,
4187				gen_lowpart_for_combine (mode, op0));
4188	    }
4189
4190	  /* If X is 0/1, (eq X 0) is X-1.  */
4191	  else if (STORE_FLAG_VALUE == -1
4192		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4193		   && op1 == const0_rtx
4194		   && nonzero_bits (op0, mode) == 1)
4195	    {
4196	      op0 = expand_compound_operation (op0);
4197	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4198	    }
4199
4200	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4201	     one bit that might be nonzero, we can convert (ne x 0) to
4202	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4203	     AND with STORE_FLAG_VALUE when we are done, since we are only
4204	     going to test the sign bit.  */
4205	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4206	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4207	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4208		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4209	      && op1 == const0_rtx
4210	      && mode == GET_MODE (op0)
4211	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4212	    {
4213	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4214					expand_compound_operation (op0),
4215					GET_MODE_BITSIZE (mode) - 1 - i);
4216	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4217		return XEXP (x, 0);
4218	      else
4219		return x;
4220	    }
4221
4222	  /* If the code changed, return a whole new comparison.  */
4223	  if (new_code != code)
4224	    return gen_rtx_combine (new_code, mode, op0, op1);
4225
4226	  /* Otherwise, keep this operation, but maybe change its operands.
4227	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4228	  SUBST (XEXP (x, 0), op0);
4229	  SUBST (XEXP (x, 1), op1);
4230	}
4231      break;
4232
4233    case IF_THEN_ELSE:
4234      return simplify_if_then_else (x);
4235
4236    case ZERO_EXTRACT:
4237    case SIGN_EXTRACT:
4238    case ZERO_EXTEND:
4239    case SIGN_EXTEND:
4240      /* If we are processing SET_DEST, we are done.  */
4241      if (in_dest)
4242	return x;
4243
4244      return expand_compound_operation (x);
4245
4246    case SET:
4247      return simplify_set (x);
4248
4249    case AND:
4250    case IOR:
4251    case XOR:
4252      return simplify_logical (x, last);
4253
4254    case ABS:
4255      /* (abs (neg <foo>)) -> (abs <foo>) */
4256      if (GET_CODE (XEXP (x, 0)) == NEG)
4257	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4258
4259      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4260         do nothing.  */
4261      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4262	break;
4263
4264      /* If operand is something known to be positive, ignore the ABS.  */
4265      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4266	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4267	       <= HOST_BITS_PER_WIDE_INT)
4268	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4269		   & ((HOST_WIDE_INT) 1
4270		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4271		  == 0)))
4272	return XEXP (x, 0);
4273
4274
4275      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4276      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4277	return gen_rtx_combine (NEG, mode, XEXP (x, 0));
4278
4279      break;
4280
4281    case FFS:
4282      /* (ffs (*_extend <X>)) = (ffs <X>) */
4283      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4284	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4285	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4286      break;
4287
4288    case FLOAT:
4289      /* (float (sign_extend <X>)) = (float <X>).  */
4290      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4291	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4292      break;
4293
4294    case ASHIFT:
4295    case LSHIFTRT:
4296    case ASHIFTRT:
4297    case ROTATE:
4298    case ROTATERT:
4299      /* If this is a shift by a constant amount, simplify it.  */
4300      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4301	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4302				     INTVAL (XEXP (x, 1)));
4303
4304#ifdef SHIFT_COUNT_TRUNCATED
4305      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4306	SUBST (XEXP (x, 1),
4307	       force_to_mode (XEXP (x, 1), GET_MODE (x),
4308			      ((HOST_WIDE_INT) 1
4309			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4310			      - 1,
4311			      NULL_RTX, 0));
4312#endif
4313
4314      break;
4315
4316    default:
4317      break;
4318    }
4319
4320  return x;
4321}
4322
4323/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4324
4325static rtx
4326simplify_if_then_else (x)
4327     rtx x;
4328{
4329  enum machine_mode mode = GET_MODE (x);
4330  rtx cond = XEXP (x, 0);
4331  rtx true = XEXP (x, 1);
4332  rtx false = XEXP (x, 2);
4333  enum rtx_code true_code = GET_CODE (cond);
4334  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4335  rtx temp;
4336  int i;
4337
4338  /* Simplify storing of the truth value.  */
4339  if (comparison_p && true == const_true_rtx && false == const0_rtx)
4340    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4341
4342  /* Also when the truth value has to be reversed.  */
4343  if (comparison_p && reversible_comparison_p (cond)
4344      && true == const0_rtx && false == const_true_rtx)
4345    return gen_binary (reverse_condition (true_code),
4346		       mode, XEXP (cond, 0), XEXP (cond, 1));
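  /* E.g., on a target where STORE_FLAG_VALUE is 1 (so const_true_rtx is
     (const_int 1)):
	 (if_then_else (lt A B) (const_int 1) (const_int 0)) -> (lt A B)
	 (if_then_else (lt A B) (const_int 0) (const_int 1)) -> (ge A B)  */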
4347
4348  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4349     in it is being compared against certain values.  Get the true and false
4350     comparisons and see if that says anything about the value of each arm.  */
4351
4352  if (comparison_p && reversible_comparison_p (cond)
4353      && GET_CODE (XEXP (cond, 0)) == REG)
4354    {
4355      HOST_WIDE_INT nzb;
4356      rtx from = XEXP (cond, 0);
4357      enum rtx_code false_code = reverse_condition (true_code);
4358      rtx true_val = XEXP (cond, 1);
4359      rtx false_val = true_val;
4360      int swapped = 0;
4361
4362      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4363
4364      if (false_code == EQ)
4365	{
4366	  swapped = 1, true_code = EQ, false_code = NE;
4367	  temp = true, true = false, false = temp;
4368	}
4369
4370      /* If we are comparing against zero and the expression being tested has
4371	 only a single bit that might be nonzero, that is its value when it is
4372	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4373
4374      if (true_code == EQ && true_val == const0_rtx
4375	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4376	false_code = EQ, false_val = GEN_INT (nzb);
4377      else if (true_code == EQ && true_val == const0_rtx
4378	       && (num_sign_bit_copies (from, GET_MODE (from))
4379		   == GET_MODE_BITSIZE (GET_MODE (from))))
4380	false_code = EQ, false_val = constm1_rtx;
4381
4382      /* Now simplify an arm if we know the value of the register in the
4383	 branch and it is used in the arm.  Be careful due to the potential
4384	 of locally-shared RTL.  */
4385
4386      if (reg_mentioned_p (from, true))
4387	true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4388		      pc_rtx, pc_rtx, 0, 0);
4389      if (reg_mentioned_p (from, false))
4390	false = subst (known_cond (copy_rtx (false), false_code,
4391				   from, false_val),
4392		       pc_rtx, pc_rtx, 0, 0);
4393
4394      SUBST (XEXP (x, 1), swapped ? false : true);
4395      SUBST (XEXP (x, 2), swapped ? true : false);
4396
4397      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4398    }
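  /* E.g., if nonzero_bits says X can only be 0 or 8 (a single bit), then in
	 (if_then_else (eq X (const_int 0)) (plus X A) (minus X A))
     the arms are rewritten using X == 0 and X == 8 respectively, giving
     roughly (if_then_else (eq X (const_int 0)) A (minus (const_int 8) A)).  */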
4399
4400  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4401     reversed, do so to avoid needing two sets of patterns for
4402     subtract-and-branch insns.  Similarly if we have a constant in the true
4403     arm, if the false arm is the same as the first operand of the
4404     comparison, or if the false arm is more complicated than the true arm.  */
4405
4406  if (comparison_p && reversible_comparison_p (cond)
4407      && (true == pc_rtx
4408	  || (CONSTANT_P (true)
4409	      && GET_CODE (false) != CONST_INT && false != pc_rtx)
4410	  || true == const0_rtx
4411	  || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4412	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4413	  || (GET_CODE (true) == SUBREG
4414	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4415	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4416	  || reg_mentioned_p (true, false)
4417	  || rtx_equal_p (false, XEXP (cond, 0))))
4418    {
4419      true_code = reverse_condition (true_code);
4420      SUBST (XEXP (x, 0),
4421	     gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4422			 XEXP (cond, 1)));
4423
4424      SUBST (XEXP (x, 1), false);
4425      SUBST (XEXP (x, 2), true);
4426
4427      temp = true, true = false, false = temp, cond = XEXP (x, 0);
4428
4429      /* It is possible that the conditional has been simplified out.  */
4430      true_code = GET_CODE (cond);
4431      comparison_p = GET_RTX_CLASS (true_code) == '<';
4432    }
4433
4434  /* If the two arms are identical, we don't need the comparison.  */
4435
4436  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4437    return true;
4438
4439  /* Convert a == b ? b : a to "a".  */
4440  if (true_code == EQ && ! side_effects_p (cond)
4441      && rtx_equal_p (XEXP (cond, 0), false)
4442      && rtx_equal_p (XEXP (cond, 1), true))
4443    return false;
4444  else if (true_code == NE && ! side_effects_p (cond)
4445	   && rtx_equal_p (XEXP (cond, 0), true)
4446	   && rtx_equal_p (XEXP (cond, 1), false))
4447    return true;
4448
4449  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4450
4451  if (GET_MODE_CLASS (mode) == MODE_INT
4452      && GET_CODE (false) == NEG
4453      && rtx_equal_p (true, XEXP (false, 0))
4454      && comparison_p
4455      && rtx_equal_p (true, XEXP (cond, 0))
4456      && ! side_effects_p (true))
4457    switch (true_code)
4458      {
4459      case GT:
4460      case GE:
4461	return gen_unary (ABS, mode, mode, true);
4462      case LT:
4463      case LE:
4464	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4465      default:
4466	break;
4467      }
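  /* E.g. (if_then_else (ge X (const_int 0)) X (neg X)) -> (abs X), and
     (if_then_else (lt X (const_int 0)) X (neg X)) -> (neg (abs X)).  */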
4468
4469  /* Look for MIN or MAX.  */
4470
4471  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4472      && comparison_p
4473      && rtx_equal_p (XEXP (cond, 0), true)
4474      && rtx_equal_p (XEXP (cond, 1), false)
4475      && ! side_effects_p (cond))
4476    switch (true_code)
4477      {
4478      case GE:
4479      case GT:
4480	return gen_binary (SMAX, mode, true, false);
4481      case LE:
4482      case LT:
4483	return gen_binary (SMIN, mode, true, false);
4484      case GEU:
4485      case GTU:
4486	return gen_binary (UMAX, mode, true, false);
4487      case LEU:
4488      case LTU:
4489	return gen_binary (UMIN, mode, true, false);
4490      default:
4491	break;
4492      }
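  /* E.g. (if_then_else (gt A B) A B) -> (smax A B), and
     (if_then_else (ltu A B) A B) -> (umin A B).  */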
4493
4494  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4495     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4496     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4497     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4498     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4499     neither 1 nor -1, but it isn't worth checking for.  */
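  /* For instance, with STORE_FLAG_VALUE == 1,
	 (if_then_else (ne A B) (plus Z (const_int 4)) Z)
     can become (plus Z (mult (ne A B) (const_int 4))), since the comparison
     supplies a multiplier of 1 or 0.  */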
4500
4501  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4502      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4503    {
4504      rtx t = make_compound_operation (true, SET);
4505      rtx f = make_compound_operation (false, SET);
4506      rtx cond_op0 = XEXP (cond, 0);
4507      rtx cond_op1 = XEXP (cond, 1);
4508      enum rtx_code op, extend_op = NIL;
4509      enum machine_mode m = mode;
4510      rtx z = 0, c1;
4511
4512      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4513	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4514	   || GET_CODE (t) == ASHIFT
4515	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4516	  && rtx_equal_p (XEXP (t, 0), f))
4517	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4518
4519      /* If an identity-zero op is commutative, check whether there
4520	 would be a match if we swapped the operands.  */
4521      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4522		|| GET_CODE (t) == XOR)
4523	       && rtx_equal_p (XEXP (t, 1), f))
4524	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4525      else if (GET_CODE (t) == SIGN_EXTEND
4526	       && (GET_CODE (XEXP (t, 0)) == PLUS
4527		   || GET_CODE (XEXP (t, 0)) == MINUS
4528		   || GET_CODE (XEXP (t, 0)) == IOR
4529		   || GET_CODE (XEXP (t, 0)) == XOR
4530		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4531		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4532		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4533	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4534	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4535	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4536	       && (num_sign_bit_copies (f, GET_MODE (f))
4537		   > (GET_MODE_BITSIZE (mode)
4538		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4539	{
4540	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4541	  extend_op = SIGN_EXTEND;
4542	  m = GET_MODE (XEXP (t, 0));
4543	}
4544      else if (GET_CODE (t) == SIGN_EXTEND
4545	       && (GET_CODE (XEXP (t, 0)) == PLUS
4546		   || GET_CODE (XEXP (t, 0)) == IOR
4547		   || GET_CODE (XEXP (t, 0)) == XOR)
4548	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4549	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4550	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4551	       && (num_sign_bit_copies (f, GET_MODE (f))
4552		   > (GET_MODE_BITSIZE (mode)
4553		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4554	{
4555	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4556	  extend_op = SIGN_EXTEND;
4557	  m = GET_MODE (XEXP (t, 0));
4558	}
4559      else if (GET_CODE (t) == ZERO_EXTEND
4560	       && (GET_CODE (XEXP (t, 0)) == PLUS
4561		   || GET_CODE (XEXP (t, 0)) == MINUS
4562		   || GET_CODE (XEXP (t, 0)) == IOR
4563		   || GET_CODE (XEXP (t, 0)) == XOR
4564		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4565		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4566		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4567	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4568	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4569	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4570	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4571	       && ((nonzero_bits (f, GET_MODE (f))
4572		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4573		   == 0))
4574	{
4575	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4576	  extend_op = ZERO_EXTEND;
4577	  m = GET_MODE (XEXP (t, 0));
4578	}
4579      else if (GET_CODE (t) == ZERO_EXTEND
4580	       && (GET_CODE (XEXP (t, 0)) == PLUS
4581		   || GET_CODE (XEXP (t, 0)) == IOR
4582		   || GET_CODE (XEXP (t, 0)) == XOR)
4583	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4584	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4585	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4586	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4587	       && ((nonzero_bits (f, GET_MODE (f))
4588		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4589		   == 0))
4590	{
4591	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4592	  extend_op = ZERO_EXTEND;
4593	  m = GET_MODE (XEXP (t, 0));
4594	}
4595
4596      if (z)
4597	{
4598	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4599			pc_rtx, pc_rtx, 0, 0);
4600	  temp = gen_binary (MULT, m, temp,
4601			     gen_binary (MULT, m, c1, const_true_rtx));
4602	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4603	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4604
4605	  if (extend_op != NIL)
4606	    temp = gen_unary (extend_op, mode, m, temp);
4607
4608	  return temp;
4609	}
4610    }
4611
4612  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4613     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4614     negation of a single bit, we can convert this operation to a shift.  We
4615     can actually do this more generally, but it doesn't seem worth it.  */
4616
4617  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4618      && false == const0_rtx && GET_CODE (true) == CONST_INT
4619      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4620	   && (i = exact_log2 (INTVAL (true))) >= 0)
4621	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4622	       == GET_MODE_BITSIZE (mode))
4623	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
4624    return
4625      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4626			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
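  /* E.g., if A is known to be either 0 or 1,
	 (if_then_else (ne A (const_int 0)) (const_int 4) (const_int 0))
     becomes (ashift A (const_int 2)).  */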
4627
4628  return x;
4629}
4630
4631/* Simplify X, a SET expression.  Return the new expression.  */
4632
4633static rtx
4634simplify_set (x)
4635     rtx x;
4636{
4637  rtx src = SET_SRC (x);
4638  rtx dest = SET_DEST (x);
4639  enum machine_mode mode
4640    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4641  rtx other_insn;
4642  rtx *cc_use;
4643
4644  /* (set (pc) (return)) gets written as (return).  */
4645  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4646    return src;
4647
4648  /* Now that we know for sure which bits of SRC we are using, see if we can
4649     simplify the expression for the object knowing that we only need the
4650     low-order bits.  */
4651
4652  if (GET_MODE_CLASS (mode) == MODE_INT)
4653    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4654
4655  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4656     the comparison result and try to simplify it unless we already have used
4657     undobuf.other_insn.  */
4658  if ((GET_CODE (src) == COMPARE
4659#ifdef HAVE_cc0
4660       || dest == cc0_rtx
4661#endif
4662       )
4663      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4664      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4665      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4666      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4667    {
4668      enum rtx_code old_code = GET_CODE (*cc_use);
4669      enum rtx_code new_code;
4670      rtx op0, op1;
4671      int other_changed = 0;
4672      enum machine_mode compare_mode = GET_MODE (dest);
4673
4674      if (GET_CODE (src) == COMPARE)
4675	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4676      else
4677	op0 = src, op1 = const0_rtx;
4678
4679      /* Simplify our comparison, if possible.  */
4680      new_code = simplify_comparison (old_code, &op0, &op1);
4681
4682#ifdef EXTRA_CC_MODES
4683      /* If this machine has CC modes other than CCmode, check to see if we
4684	 need to use a different CC mode here.  */
4685      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4686#endif /* EXTRA_CC_MODES */
4687
4688#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4689      /* If the mode changed, we have to change SET_DEST, the mode in the
4690	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
4691	 a hard register, just build new versions with the proper mode.  If it
4692	 is a pseudo, we lose unless it is the only time we set the pseudo, in
4693	 which case we can safely change its mode.  */
4694      if (compare_mode != GET_MODE (dest))
4695	{
4696	  int regno = REGNO (dest);
4697	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
4698
4699	  if (regno < FIRST_PSEUDO_REGISTER
4700	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
4701	    {
4702	      if (regno >= FIRST_PSEUDO_REGISTER)
4703		SUBST (regno_reg_rtx[regno], new_dest);
4704
4705	      SUBST (SET_DEST (x), new_dest);
4706	      SUBST (XEXP (*cc_use, 0), new_dest);
4707	      other_changed = 1;
4708
4709	      dest = new_dest;
4710	    }
4711	}
4712#endif
4713
4714      /* If the code changed, we have to build a new comparison in
4715	 undobuf.other_insn.  */
4716      if (new_code != old_code)
4717	{
4718	  unsigned HOST_WIDE_INT mask;
4719
4720	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4721					   dest, const0_rtx));
4722
4723	  /* If the only change we made was to change an EQ into an NE or
4724	     vice versa, OP0 has only one bit that might be nonzero, and OP1
4725	     is zero, check if changing the user of the condition code will
4726	     produce a valid insn.  If it won't, we can keep the original code
4727	     in that insn by surrounding our operation with an XOR.  */
4728
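	  /* E.g., if OP0 can only have bit 3 nonzero, then
	     (eq OP0 (const_int 0)) is equivalent to
	     (ne (xor OP0 (const_int 8)) (const_int 0)), so the insn's
	     original code can be kept by XORing in the mask.  */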
4729	  if (((old_code == NE && new_code == EQ)
4730	       || (old_code == EQ && new_code == NE))
4731	      && ! other_changed && op1 == const0_rtx
4732	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4733	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4734	    {
4735	      rtx pat = PATTERN (other_insn), note = 0;
4736
4737	      if ((recog_for_combine (&pat, other_insn, &note) < 0
4738		   && ! check_asm_operands (pat)))
4739		{
4740		  PUT_CODE (*cc_use, old_code);
4741		  other_insn = 0;
4742
4743		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4744		}
4745	    }
4746
4747	  other_changed = 1;
4748	}
4749
4750      if (other_changed)
4751	undobuf.other_insn = other_insn;
4752
4753#ifdef HAVE_cc0
4754      /* If we are now comparing against zero, change our source if
4755	 needed.  If we do not use cc0, we always have a COMPARE.  */
4756      if (op1 == const0_rtx && dest == cc0_rtx)
4757	{
4758	  SUBST (SET_SRC (x), op0);
4759	  src = op0;
4760	}
4761      else
4762#endif
4763
4764      /* Otherwise, if we didn't previously have a COMPARE in the
4765	 correct mode, we need one.  */
4766      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4767	{
4768	  SUBST (SET_SRC (x),
4769		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4770	  src = SET_SRC (x);
4771	}
4772      else
4773	{
4774	  /* Otherwise, update the COMPARE if needed.  */
4775	  SUBST (XEXP (src, 0), op0);
4776	  SUBST (XEXP (src, 1), op1);
4777	}
4778    }
4779  else
4780    {
4781      /* Get SET_SRC in a form where we have placed back any
4782	 compound expressions.  Then do the checks below.  */
4783      src = make_compound_operation (src, SET);
4784      SUBST (SET_SRC (x), src);
4785    }
4786
4787  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4788     and X being a REG or (subreg (reg)), we may be able to convert this to
4789     (set (subreg:m2 x) (op)).
4790
4791     We can always do this if M1 is narrower than M2 because that means that
4792     we only care about the low bits of the result.
4793
4794     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4795     perform a narrower operation than requested since the high-order bits will
4796     be undefined.  On machines where it is defined, this transformation is safe
4797     as long as M1 and M2 have the same number of words.  */
4798
4799  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4800      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4801      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4802	   / UNITS_PER_WORD)
4803	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4804	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4805#ifndef WORD_REGISTER_OPERATIONS
4806      && (GET_MODE_SIZE (GET_MODE (src))
4807	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4808#endif
4809#ifdef CLASS_CANNOT_CHANGE_SIZE
4810      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4811	    && (TEST_HARD_REG_BIT
4812		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4813		 REGNO (dest)))
4814	    && (GET_MODE_SIZE (GET_MODE (src))
4815		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4816#endif
4817      && (GET_CODE (dest) == REG
4818	  || (GET_CODE (dest) == SUBREG
4819	      && GET_CODE (SUBREG_REG (dest)) == REG)))
4820    {
4821      SUBST (SET_DEST (x),
4822	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4823				      dest));
4824      SUBST (SET_SRC (x), SUBREG_REG (src));
4825
4826      src = SET_SRC (x), dest = SET_DEST (x);
4827    }
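  /* A sketch: on a 32-bit-word target,
	 (set (reg:HI R) (subreg:HI (plus:SI A B) 0))
     can become
	 (set (subreg:SI (reg:HI R) 0) (plus:SI A B))
     since only the low 16 bits of the sum are live in R.  */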
4828
4829#ifdef LOAD_EXTEND_OP
4830  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4831     would require a paradoxical subreg.  Replace the subreg with a
4832     zero_extend to avoid the reload that would otherwise be required.  */
4833
4834  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4835      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4836      && SUBREG_WORD (src) == 0
4837      && (GET_MODE_SIZE (GET_MODE (src))
4838	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4839      && GET_CODE (SUBREG_REG (src)) == MEM)
4840    {
4841      SUBST (SET_SRC (x),
4842	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4843			      GET_MODE (src), XEXP (src, 0)));
4844
4845      src = SET_SRC (x);
4846    }
4847#endif
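  /* E.g., on a target where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND,
	 (set FOO (subreg:SI (mem:QI BAR) 0))
     is rewritten as (set FOO (zero_extend:SI (mem:QI BAR))).  */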
4848
4849  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4850     are comparing an item known to be 0 or -1 against 0, use a logical
4851     operation instead. Check for one of the arms being an IOR of the other
4852     arm with some value.  We compute three terms to be IOR'ed together.  In
4853     practice, at most two will be nonzero.  Then we do the IOR's.  */
4854
4855  if (GET_CODE (dest) != PC
4856      && GET_CODE (src) == IF_THEN_ELSE
4857      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4858      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4859      && XEXP (XEXP (src, 0), 1) == const0_rtx
4860      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4861#ifdef HAVE_conditional_move
4862      && ! can_conditionally_move_p (GET_MODE (src))
4863#endif
4864      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4865			       GET_MODE (XEXP (XEXP (src, 0), 0)))
4866	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4867      && ! side_effects_p (src))
4868    {
4869      rtx true = (GET_CODE (XEXP (src, 0)) == NE
4870		      ? XEXP (src, 1) : XEXP (src, 2));
4871      rtx false = (GET_CODE (XEXP (src, 0)) == NE
4872		   ? XEXP (src, 2) : XEXP (src, 1));
4873      rtx term1 = const0_rtx, term2, term3;
4874
4875      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4876	term1 = false, true = XEXP (true, 1), false = const0_rtx;
4877      else if (GET_CODE (true) == IOR
4878	       && rtx_equal_p (XEXP (true, 1), false))
4879	term1 = false, true = XEXP (true, 0), false = const0_rtx;
4880      else if (GET_CODE (false) == IOR
4881	       && rtx_equal_p (XEXP (false, 0), true))
4882	term1 = true, false = XEXP (false, 1), true = const0_rtx;
4883      else if (GET_CODE (false) == IOR
4884	       && rtx_equal_p (XEXP (false, 1), true))
4885	term1 = true, false = XEXP (false, 0), true = const0_rtx;
4886
4887      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4888      term3 = gen_binary (AND, GET_MODE (src),
4889			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4890				     XEXP (XEXP (src, 0), 0)),
4891			  false);
4892
4893      SUBST (SET_SRC (x),
4894	     gen_binary (IOR, GET_MODE (src),
4895			 gen_binary (IOR, GET_MODE (src), term1, term2),
4896			 term3));
4897
4898      src = SET_SRC (x);
4899    }
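  /* E.g., on a target without conditional moves, if A is known to be 0 or -1,
	 (set D (if_then_else (ne A (const_int 0)) B C))
     becomes essentially (set D (ior (and A B) (and (not A) C))), modulo the
     TERM1 absorption cases above.  */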
4900
4901  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4902     whole thing fail.  */
4903  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4904    return src;
4905  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4906    return dest;
4907  else
4908    /* Convert this into a field assignment operation, if possible.  */
4909    return make_field_assignment (x);
4910}
4911
4912	/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4913   result.  LAST is nonzero if this is the last retry.  */
4914
4915static rtx
4916simplify_logical (x, last)
4917     rtx x;
4918     int last;
4919{
4920  enum machine_mode mode = GET_MODE (x);
4921  rtx op0 = XEXP (x, 0);
4922  rtx op1 = XEXP (x, 1);
4923
4924  switch (GET_CODE (x))
4925    {
4926    case AND:
4927      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4928	 insn (and may simplify more).  */
4929      if (GET_CODE (op0) == XOR
4930	  && rtx_equal_p (XEXP (op0, 0), op1)
4931	  && ! side_effects_p (op1))
4932	x = gen_binary (AND, mode,
4933			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4934
4935      if (GET_CODE (op0) == XOR
4936	  && rtx_equal_p (XEXP (op0, 1), op1)
4937	  && ! side_effects_p (op1))
4938	x = gen_binary (AND, mode,
4939			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4940
4941      /* Similarly for (~ (A ^ B)) & A.  */
4942      if (GET_CODE (op0) == NOT
4943	  && GET_CODE (XEXP (op0, 0)) == XOR
4944	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4945	  && ! side_effects_p (op1))
4946	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4947
4948      if (GET_CODE (op0) == NOT
4949	  && GET_CODE (XEXP (op0, 0)) == XOR
4950	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4951	  && ! side_effects_p (op1))
4952	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4953
4954      if (GET_CODE (op1) == CONST_INT)
4955	{
4956	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4957
4958	  /* If we have (ior (and X C1) C2) and the next restart would be
4959	     the last, simplify this by making C1 as small as possible
4960	     and then exit.  */
4961	  if (last
4962	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4963	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
4964	      && GET_CODE (op1) == CONST_INT)
4965	    return gen_binary (IOR, mode,
4966			       gen_binary (AND, mode, XEXP (op0, 0),
4967					   GEN_INT (INTVAL (XEXP (op0, 1))
4968						    & ~ INTVAL (op1))), op1);
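	  /* E.g. (ior (and X (const_int 0xff)) (const_int 0x0f)) becomes
	     (ior (and X (const_int 0xf0)) (const_int 0x0f)); bits that the
	     IOR forces on are dropped from C1.  */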
4969
4970	  if (GET_CODE (x) != AND)
4971	    return x;
4972
4973	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4974	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
4975	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4976	}
4977
4978      /* Convert (A | B) & A to A.  */
4979      if (GET_CODE (op0) == IOR
4980	  && (rtx_equal_p (XEXP (op0, 0), op1)
4981	      || rtx_equal_p (XEXP (op0, 1), op1))
4982	  && ! side_effects_p (XEXP (op0, 0))
4983	  && ! side_effects_p (XEXP (op0, 1)))
4984	return op1;
4985
4986      /* In the following group of tests (and those in case IOR below),
4987	 we start with some combination of logical operations and apply
4988	 the distributive law followed by the inverse distributive law.
4989	 Most of the time, this results in no change.  However, if some of
4990	 the operands are the same or inverses of each other, simplifications
4991	 will result.
4992
4993	 For example, (and (ior A B) (not B)) can occur as the result of
4994	 expanding a bit field assignment.  When we apply the distributive
4995	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
4996	 which then simplifies to (and (A (not B))).
4997
4998	 If we have (and (ior A B) C), apply the distributive law and then
4999	 the inverse distributive law to see if things simplify.  */
5000
5001      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5002	{
5003	  x = apply_distributive_law
5004	    (gen_binary (GET_CODE (op0), mode,
5005			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5006			 gen_binary (AND, mode, XEXP (op0, 1),
5007				     copy_rtx (op1))));
5008	  if (GET_CODE (x) != AND)
5009	    return x;
5010	}
5011
5012      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5013	return apply_distributive_law
5014	  (gen_binary (GET_CODE (op1), mode,
5015		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5016		       gen_binary (AND, mode, XEXP (op1, 1),
5017				   copy_rtx (op0))));
5018
5019      /* Similarly, taking advantage of the fact that
5020	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
5021
5022      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5023	return apply_distributive_law
5024	  (gen_binary (XOR, mode,
5025		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5026		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5027				   XEXP (op1, 1))));
5028
5029      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5030	return apply_distributive_law
5031	  (gen_binary (XOR, mode,
5032		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5033		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)),
				   XEXP (op0, 1))));
5034      break;
5035
5036    case IOR:
5037      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5038      if (GET_CODE (op1) == CONST_INT
5039	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5040	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
5041	return op1;
5042
5043      /* Convert (A & B) | A to A.  */
5044      if (GET_CODE (op0) == AND
5045	  && (rtx_equal_p (XEXP (op0, 0), op1)
5046	      || rtx_equal_p (XEXP (op0, 1), op1))
5047	  && ! side_effects_p (XEXP (op0, 0))
5048	  && ! side_effects_p (XEXP (op0, 1)))
5049	return op1;
5050
5051      /* If we have (ior (and A B) C), apply the distributive law and then
5052	 the inverse distributive law to see if things simplify.  */
5053
5054      if (GET_CODE (op0) == AND)
5055	{
5056	  x = apply_distributive_law
5057	    (gen_binary (AND, mode,
5058			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5059			 gen_binary (IOR, mode, XEXP (op0, 1),
5060				     copy_rtx (op1))));
5061
5062	  if (GET_CODE (x) != IOR)
5063	    return x;
5064	}
5065
5066      if (GET_CODE (op1) == AND)
5067	{
5068	  x = apply_distributive_law
5069	    (gen_binary (AND, mode,
5070			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5071			 gen_binary (IOR, mode, XEXP (op1, 1),
5072				     copy_rtx (op0))));
5073
5074	  if (GET_CODE (x) != IOR)
5075	    return x;
5076	}
5077
5078      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5079	 mode size to (rotate A CX).  */
5080
5081      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5082	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5083	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5084	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5085	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5086	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5087	      == GET_MODE_BITSIZE (mode)))
5088	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5089			       (GET_CODE (op0) == ASHIFT
5090				? XEXP (op0, 1) : XEXP (op1, 1)));
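      /* E.g. in SImode,
	 (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))
	 becomes (rotate A (const_int 24)), since 24 + 8 == 32.  */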
5091
5092      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5093	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5094	 does not affect any of the bits in OP1, it can really be done
5095	 as a PLUS and we can associate.  We do this by seeing if OP1
5096	 can be safely shifted left C bits.  */
5097      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5098	  && GET_CODE (XEXP (op0, 0)) == PLUS
5099	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5100	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5101	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5102	{
5103	  int count = INTVAL (XEXP (op0, 1));
5104	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5105
5106	  if (mask >> count == INTVAL (op1)
5107	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5108	    {
5109	      SUBST (XEXP (XEXP (op0, 0), 1),
5110		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5111	      return op0;
5112	    }
5113	}
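      /* E.g. (ior (ashiftrt (plus X C) (const_int 3)) (const_int 1)):
	 shifting 1 back left by 3 gives 8, and if bit 3 of the PLUS is
	 known to be zero, the result is
	 (ashiftrt (plus X (ior C (const_int 8))) (const_int 3)).  */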
5114      break;
5115
5116    case XOR:
5117      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5118	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5119	 (NOT y).  */
5120      {
5121	int num_negated = 0;
5122
5123	if (GET_CODE (op0) == NOT)
5124	  num_negated++, op0 = XEXP (op0, 0);
5125	if (GET_CODE (op1) == NOT)
5126	  num_negated++, op1 = XEXP (op1, 0);
5127
5128	if (num_negated == 2)
5129	  {
5130	    SUBST (XEXP (x, 0), op0);
5131	    SUBST (XEXP (x, 1), op1);
5132	  }
5133	else if (num_negated == 1)
5134	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
5135      }
5136
5137      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5138	 correspond to a machine insn or result in further simplifications
5139	 if B is a constant.  */
5140
5141      if (GET_CODE (op0) == AND
5142	  && rtx_equal_p (XEXP (op0, 1), op1)
5143	  && ! side_effects_p (op1))
5144	return gen_binary (AND, mode,
5145			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
5146			   op1);
5147
5148      else if (GET_CODE (op0) == AND
5149	       && rtx_equal_p (XEXP (op0, 0), op1)
5150	       && ! side_effects_p (op1))
5151	return gen_binary (AND, mode,
5152			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
5153			   op1);
5154
5155      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5156	 comparison if STORE_FLAG_VALUE is 1.  */
5157      if (STORE_FLAG_VALUE == 1
5158	  && op1 == const1_rtx
5159	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5160	  && reversible_comparison_p (op0))
5161	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5162				mode, XEXP (op0, 0), XEXP (op0, 1));
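      /* E.g., with STORE_FLAG_VALUE == 1,
	 (xor (lt A B) (const_int 1)) -> (ge A B).  */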
5163
5164      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5165	 is (lt foo (const_int 0)), so we can perform the above
5166	 simplification if STORE_FLAG_VALUE is 1.  */
5167
5168      if (STORE_FLAG_VALUE == 1
5169	  && op1 == const1_rtx
5170	  && GET_CODE (op0) == LSHIFTRT
5171	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5172	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5173	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
5174
5175      /* (xor (comparison foo bar) (const_int sign-bit))
5176	 when STORE_FLAG_VALUE is the sign bit.  */
5177      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5178	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5179	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5180	  && op1 == const_true_rtx
5181	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5182	  && reversible_comparison_p (op0))
5183	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5184				mode, XEXP (op0, 0), XEXP (op0, 1));
5185      break;
5186
5187    default:
5188      abort ();
5189    }
5190
5191  return x;
5192}
5193
5194/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5195   operations" because they can be replaced with two more basic operations.
5196   ZERO_EXTEND is also considered "compound" because it can be replaced with
5197   an AND operation, which is simpler, though only one operation.
5198
5199   The function expand_compound_operation is called with an rtx expression
5200   and will convert it to the appropriate shifts and AND operations,
5201   simplifying at each stage.
5202
5203   The function make_compound_operation is called to convert an expression
5204   consisting of shifts and ANDs into the equivalent compound expression.
5205   It is the inverse of this function, loosely speaking.  */
5206
5207static rtx
5208expand_compound_operation (x)
5209     rtx x;
5210{
5211  int pos = 0, len;
5212  int unsignedp = 0;
5213  int modewidth;
5214  rtx tem;
5215
5216  switch (GET_CODE (x))
5217    {
5218    case ZERO_EXTEND:
5219      unsignedp = 1;
5220    case SIGN_EXTEND:
5221      /* We can't necessarily use a const_int for a multiword mode;
5222	 it depends on implicitly extending the value.
5223	 Since we don't know the right way to extend it,
5224	 we can't tell whether the implicit way is right.
5225
5226	 Even for a mode that is no wider than a const_int,
5227	 we can't win, because we need to sign extend one of its bits through
5228	 the rest of it, and we don't know which bit.  */
5229      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5230	return x;
5231
5232      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5233	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5234	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5235	 reloaded. If not for that, MEM's would very rarely be safe.
5236	 reloaded.  If not for that, MEMs would very rarely be safe.
5237	 Reject MODEs bigger than a word, because we might not be able
5238	 to reference a two-register group starting with an arbitrary register
5239	 (and currently gen_lowpart might crash for a SUBREG).  */
5240
5241      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5242	return x;
5243
5244      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5245      /* If the inner object has VOIDmode (the only way this can happen
5246	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5247	 know how much masking to do.  */
5248      if (len == 0)
5249	return x;
5250
5251      break;
5252
5253    case ZERO_EXTRACT:
5254      unsignedp = 1;
5255    case SIGN_EXTRACT:
5256      /* If the operand is a CLOBBER, just return it.  */
5257      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5258	return XEXP (x, 0);
5259
5260      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5261	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5262	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5263	return x;
5264
5265      len = INTVAL (XEXP (x, 1));
5266      pos = INTVAL (XEXP (x, 2));
5267
5268      /* If this goes outside the object being extracted, replace the object
5269	 with a (use (mem ...)) construct that only combine understands
5270	 and is used only for this purpose.  */
5271      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5272	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5273
5274      if (BITS_BIG_ENDIAN)
5275	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5276
5277      break;
5278
5279    default:
5280      return x;
5281    }
5282
5283  /* We can optimize some special cases of ZERO_EXTEND.  */
5284  if (GET_CODE (x) == ZERO_EXTEND)
5285    {
5286      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5287         know that the last value didn't have any inappropriate bits
5288         set.  */
5289      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5290	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5291	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5292	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5293	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5294	return XEXP (XEXP (x, 0), 0);
5295
5296      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5297      if (GET_CODE (XEXP (x, 0)) == SUBREG
5298	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5299	  && subreg_lowpart_p (XEXP (x, 0))
5300	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5301	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5302	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5303	return SUBREG_REG (XEXP (x, 0));
5304
5305      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5306         is a comparison and STORE_FLAG_VALUE permits.  This is like
5307         the first case, but it works even when GET_MODE (x) is larger
5308         than HOST_WIDE_INT.  */
5309      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5310	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5311	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5312	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5313	      <= HOST_BITS_PER_WIDE_INT)
5314 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5315	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5316	return XEXP (XEXP (x, 0), 0);
5317
5318      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5319      if (GET_CODE (XEXP (x, 0)) == SUBREG
5320	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5321	  && subreg_lowpart_p (XEXP (x, 0))
5322	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5323	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5324	      <= HOST_BITS_PER_WIDE_INT)
5325	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5326	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5327	return SUBREG_REG (XEXP (x, 0));
5328
5329      /* If sign extension is cheaper than zero extension, then use it
5330	 if we know that no extraneous bits are set, and that the high
5331	 bit is not set.  */
5332      if (flag_expensive_optimizations
5333	  && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5334	       && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5335		    & ~ (((unsigned HOST_WIDE_INT)
5336			  GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5337			 >> 1))
5338		   == 0))
5339	      || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5340		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5341		      <= HOST_BITS_PER_WIDE_INT)
5342		  && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5343		       & ~ (((unsigned HOST_WIDE_INT)
5344			     GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5345			    >> 1))
5346		      == 0))))
5347	{
5348	  rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
5349
5350	  if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5351	    return expand_compound_operation (temp);
5352	}
5353    }
5354
5355  /* If we reach here, we want to return a pair of shifts.  The inner
5356     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5357     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5358     logical depending on the value of UNSIGNEDP.
5359
5360     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5361     converted into an AND of a shift.
5362
5363     We must check for the case where the left shift would have a negative
5364     count.  This can happen in a case like (x >> 31) & 255 on machines
5365     that can't shift by a constant.  On those machines, we would first
5366     combine the shift with the AND to produce a variable-position
5367     extraction.  Then the constant of 31 would be substituted in to produce
5368     such a position.  */
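  /* E.g., on a !BITS_BIG_ENDIAN target,
     (sign_extract:SI X (const_int 8) (const_int 4)) -- LEN 8 at POS 4 in a
     32-bit mode -- expands to
	 (ashiftrt:SI (ashift:SI X (const_int 20)) (const_int 24)).  */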
5369
5370  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5371  if (modewidth >= pos + len)
5372    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5373				GET_MODE (x),
5374				simplify_shift_const (NULL_RTX, ASHIFT,
5375						      GET_MODE (x),
5376						      XEXP (x, 0),
5377						      modewidth - pos - len),
5378				modewidth - len);
5379
5380  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5381    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5382				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5383							GET_MODE (x),
5384							XEXP (x, 0), pos),
5385				  ((HOST_WIDE_INT) 1 << len) - 1);
5386  else
5387    /* Any other cases we can't handle.  */
5388    return x;
5389
5390
5391  /* If we couldn't do this for some reason, return the original
5392     expression.  */
5393  if (GET_CODE (tem) == CLOBBER)
5394    return x;
5395
5396  return tem;
5397}
5398
5399/* X is a SET which contains an assignment of one object into
5400   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5401   or certain SUBREGS). If possible, convert it into a series of
5402   logical operations.
5403
5404   We half-heartedly support variable positions, but do not at all
5405   support variable lengths.  */
5406
5407static rtx
5408expand_field_assignment (x)
5409     rtx x;
5410{
5411  rtx inner;
5412  rtx pos;			/* Always counts from low bit.  */
5413  int len;
5414  rtx mask;
5415  enum machine_mode compute_mode;
5416
5417  /* Loop until we find something we can't simplify.  */
5418  while (1)
5419    {
5420      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5421	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5422	{
5423	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5424	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5425	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5426	}
5427      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5428	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5429	{
5430	  inner = XEXP (SET_DEST (x), 0);
5431	  len = INTVAL (XEXP (SET_DEST (x), 1));
5432	  pos = XEXP (SET_DEST (x), 2);
5433
5434	  /* If the position is constant and spans the width of INNER,
5435	     surround INNER with a USE to indicate this.  */
5436	  if (GET_CODE (pos) == CONST_INT
5437	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5438	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5439
5440	  if (BITS_BIG_ENDIAN)
5441	    {
5442	      if (GET_CODE (pos) == CONST_INT)
5443		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5444			       - INTVAL (pos));
5445	      else if (GET_CODE (pos) == MINUS
5446		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5447		       && (INTVAL (XEXP (pos, 1))
5448			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5449		/* If position is ADJUST - X, new position is X.  */
5450		pos = XEXP (pos, 0);
5451	      else
5452		pos = gen_binary (MINUS, GET_MODE (pos),
5453				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5454					   - len),
5455				  pos);
5456	    }
5457	}
5458
5459      /* A SUBREG between two modes that occupy the same numbers of words
5460	 can be done by moving the SUBREG to the source.  */
5461      else if (GET_CODE (SET_DEST (x)) == SUBREG
5462	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5463		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5464		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5465			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5466	{
5467	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5468			   gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5469						    SET_SRC (x)));
5470	  continue;
5471	}
5472      else
5473	break;
5474
5475      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5476	inner = SUBREG_REG (inner);
5477
5478      compute_mode = GET_MODE (inner);
5479
5480      /* Don't attempt bitwise arithmetic on non-integral modes.  */
5481      if (! INTEGRAL_MODE_P (compute_mode))
5482	{
5483	  enum machine_mode imode;
5484
5485	  /* Something is probably seriously wrong if this matches.  */
5486	  if (! FLOAT_MODE_P (compute_mode))
5487	    break;
5488
5489	  /* Try to find an integral mode to pun with.  */
5490	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5491	  if (imode == BLKmode)
5492	    break;
5493
5494	  compute_mode = imode;
5495	  inner = gen_lowpart_for_combine (imode, inner);
5496	}
5497
5498      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5499      if (len < HOST_BITS_PER_WIDE_INT)
5500	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5501      else
5502	break;
5503
5504      /* Now compute the equivalent expression.  Make a copy of INNER
5505	 for the SET_DEST in case it is a MEM into which we will substitute;
5506	 we don't want shared RTL in that case.  */
5507      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
5508		       gen_binary (IOR, compute_mode,
5509				   gen_binary (AND, compute_mode,
5510					       gen_unary (NOT, compute_mode,
5511							  compute_mode,
5512							  gen_binary (ASHIFT,
5513								      compute_mode,
5514								      mask, pos)),
5515					       inner),
5516				   gen_binary (ASHIFT, compute_mode,
5517					       gen_binary (AND, compute_mode,
5518							   gen_lowpart_for_combine
5519							   (compute_mode,
5520							    SET_SRC (x)),
5521							   mask),
5522					       pos)));
5523    }
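  /* E.g. (set (zero_extract:SI X (const_int 8) (const_int 4)) Y) is
     expanded, with MASK == 0xff and POS == 4, into
	 (set X (ior (and (not (ashift (const_int 255) (const_int 4))) X)
		     (ashift (and Y (const_int 255)) (const_int 4))))
     i.e. the old field bits are cleared and the new ones shifted in.  */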
5524
5525  return x;
5526}
5527
5528/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5529   it is an RTX that represents a variable starting position; otherwise,
5530   POS is the (constant) starting bit position (counted from the LSB).
5531
5532   INNER may be a USE.  This will occur when we started with a bitfield
5533   that went outside the boundary of the object in memory, which is
5534   allowed on most machines.  To isolate this case, we produce a USE
5535   whose mode is wide enough and surround the MEM with it.  The only
5536   code that understands the USE is this routine.  If it is not removed,
5537   it will cause the resulting insn not to match.
5538
5539   UNSIGNEDP is non-zero for an unsigned reference and zero for a
5540   signed reference.
5541
5542   IN_DEST is non-zero if this is a reference in the destination of a
5543   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
5544   a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
5545   be used.
5546
5547   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
5548   ZERO_EXTRACT should be built even for bits starting at bit 0.
5549
5550   MODE is the desired mode of the result (if IN_DEST == 0).
5551
5552   The result is an RTX for the extraction or NULL_RTX if the target
5553   can't handle it.  */
5554
5555static rtx
5556make_extraction (mode, inner, pos, pos_rtx, len,
5557		 unsignedp, in_dest, in_compare)
5558     enum machine_mode mode;
5559     rtx inner;
5560     int pos;
5561     rtx pos_rtx;
5562     int len;
5563     int unsignedp;
5564     int in_dest, in_compare;
5565{
5566  /* This mode describes the size of the storage area
5567     to fetch the overall value from.  Within that, we
5568     ignore the POS lowest bits, etc.  */
5569  enum machine_mode is_mode = GET_MODE (inner);
5570  enum machine_mode inner_mode;
5571  enum machine_mode wanted_inner_mode = byte_mode;
5572  enum machine_mode wanted_inner_reg_mode = word_mode;
5573  enum machine_mode pos_mode = word_mode;
5574  enum machine_mode extraction_mode = word_mode;
5575  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5576  int spans_byte = 0;
5577  rtx new = 0;
5578  rtx orig_pos_rtx = pos_rtx;
5579  int orig_pos;
5580
5581  /* Get some information about INNER and get the innermost object.  */
5582  if (GET_CODE (inner) == USE)
5583    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5584    /* We don't need to adjust the position because we set up the USE
5585       to pretend that it was a full-word object.  */
5586    spans_byte = 1, inner = XEXP (inner, 0);
5587  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5588    {
5589      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5590	 consider just the QI as the memory to extract from.
5591	 The subreg adds or removes high bits; its mode is
5592	 irrelevant to the meaning of this extraction,
5593	 since POS and LEN count from the lsb.  */
5594      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5595	is_mode = GET_MODE (SUBREG_REG (inner));
5596      inner = SUBREG_REG (inner);
5597    }
5598
5599  inner_mode = GET_MODE (inner);
5600
5601  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5602    pos = INTVAL (pos_rtx), pos_rtx = 0;
5603
5604  /* See if this can be done without an extraction.  We never can if the
5605     width of the field is not the same as that of some integer mode. For
5606     registers, we can only avoid the extraction if the position is at the
5607     low-order bit and this is either not in the destination or we have the
5608     appropriate STRICT_LOW_PART operation available.
5609
5610     For MEM, we can avoid an extract if the field starts on an appropriate
5611     boundary and we can change the mode of the memory reference.  However,
5612     we cannot directly access the MEM if we have a USE and the underlying
5613     MEM is not TMODE.  This combination means that MEM was being used in a
5614     context where bits outside its mode were being referenced; that is only
5615     valid in bit-field insns.  */
5616
5617  if (tmode != BLKmode
5618      && ! (spans_byte && inner_mode != tmode)
5619      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5620	   && GET_CODE (inner) != MEM
5621	   && (! in_dest
5622	       || (GET_CODE (inner) == REG
5623		   && (movstrict_optab->handlers[(int) tmode].insn_code
5624		       != CODE_FOR_nothing))))
5625	  || (GET_CODE (inner) == MEM && pos_rtx == 0
5626	      && (pos
5627		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5628		     : BITS_PER_UNIT)) == 0
5629	      /* We can't do this if we are widening INNER_MODE (it
5630		 may not be aligned, for one thing).  */
5631	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5632	      && (inner_mode == tmode
5633		  || (! mode_dependent_address_p (XEXP (inner, 0))
5634		      && ! MEM_VOLATILE_P (inner))))))
5635    {
5636      /* If INNER is a MEM, make a new MEM that encompasses just the desired
5637	 field.  If the original and current mode are the same, we need not
5638	 adjust the offset.  Otherwise, we adjust it if bytes are big-endian.
5639
5640	 If INNER is not a MEM, get a piece consisting of just the field
5641	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
5642
5643      if (GET_CODE (inner) == MEM)
5644	{
5645	  int offset;
5646	  /* POS counts from lsb, but make OFFSET count in memory order.  */
5647	  if (BYTES_BIG_ENDIAN)
5648	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5649	  else
5650	    offset = pos / BITS_PER_UNIT;
5651
5652	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
5653	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5654	  MEM_COPY_ATTRIBUTES (new, inner);
5655	}
5656      else if (GET_CODE (inner) == REG)
5657	{
5658	  /* We can't call gen_lowpart_for_combine here since we always want
5659	     a SUBREG and it would sometimes return a new hard register.  */
5660	  if (tmode != inner_mode)
5661	    new = gen_rtx_SUBREG (tmode, inner,
5662				  (WORDS_BIG_ENDIAN
5663				   && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5664				   ? (((GET_MODE_SIZE (inner_mode)
5665					- GET_MODE_SIZE (tmode))
5666				       / UNITS_PER_WORD)
5667				      - pos / BITS_PER_WORD)
5668				   : pos / BITS_PER_WORD));
5669	  else
5670	    new = inner;
5671	}
5672      else
5673	new = force_to_mode (inner, tmode,
5674			     len >= HOST_BITS_PER_WIDE_INT
5675			     ? GET_MODE_MASK (tmode)
5676			     : ((HOST_WIDE_INT) 1 << len) - 1,
5677			     NULL_RTX, 0);
5678
5679      /* If this extraction is going into the destination of a SET,
5680	 make a STRICT_LOW_PART unless we made a MEM.  */
5681
5682      if (in_dest)
5683	return (GET_CODE (new) == MEM ? new
5684		: (GET_CODE (new) != SUBREG
5685		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
5686		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5687
5688      /* Otherwise, sign- or zero-extend unless we already are in the
5689	 proper mode.  */
5690
5691      return (mode == tmode ? new
5692	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5693				 mode, new));
5694    }
5695
5696  /* Unless this is a COMPARE or we have a funny memory reference,
5697     don't do anything with zero-extending field extracts starting at
5698     the low-order bit since they are simple AND operations.  */
5699  if (pos_rtx == 0 && pos == 0 && ! in_dest
5700      && ! in_compare && ! spans_byte && unsignedp)
5701    return 0;
5702
5703  /* Unless we are allowed to span bytes, reject this if we would be
5704     spanning bytes or if the position is not a constant and the length
5705     is not 1.  In all other cases, we would only be going outside
5706     our object in cases when an original shift would have been
5707     undefined.  */
5708  if (! spans_byte
5709      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5710	  || (pos_rtx != 0 && len != 1)))
5711    return 0;
5712
5713  /* Get the mode to use should INNER not be a MEM, the mode for the position,
5714     and the mode for the result.  */
5715#ifdef HAVE_insv
5716  if (in_dest)
5717    {
5718      wanted_inner_reg_mode
5719	= (insn_operand_mode[(int) CODE_FOR_insv][0] == VOIDmode
5720	   ? word_mode
5721	   : insn_operand_mode[(int) CODE_FOR_insv][0]);
5722      pos_mode = (insn_operand_mode[(int) CODE_FOR_insv][2] == VOIDmode
5723		  ? word_mode : insn_operand_mode[(int) CODE_FOR_insv][2]);
5724      extraction_mode = (insn_operand_mode[(int) CODE_FOR_insv][3] == VOIDmode
5725			 ? word_mode
5726			 : insn_operand_mode[(int) CODE_FOR_insv][3]);
5727    }
5728#endif
5729
5730#ifdef HAVE_extzv
5731  if (! in_dest && unsignedp)
5732    {
5733      wanted_inner_reg_mode
5734	= (insn_operand_mode[(int) CODE_FOR_extzv][1] == VOIDmode
5735	   ? word_mode
5736	   : insn_operand_mode[(int) CODE_FOR_extzv][1]);
5737      pos_mode = (insn_operand_mode[(int) CODE_FOR_extzv][3] == VOIDmode
5738		  ? word_mode : insn_operand_mode[(int) CODE_FOR_extzv][3]);
5739      extraction_mode = (insn_operand_mode[(int) CODE_FOR_extzv][0] == VOIDmode
5740			 ? word_mode
5741			 : insn_operand_mode[(int) CODE_FOR_extzv][0]);
5742    }
5743#endif
5744
5745#ifdef HAVE_extv
5746  if (! in_dest && ! unsignedp)
5747    {
5748      wanted_inner_reg_mode
5749	= (insn_operand_mode[(int) CODE_FOR_extv][1] == VOIDmode
5750	   ? word_mode
5751	   : insn_operand_mode[(int) CODE_FOR_extv][1]);
5752      pos_mode = (insn_operand_mode[(int) CODE_FOR_extv][3] == VOIDmode
5753		  ? word_mode : insn_operand_mode[(int) CODE_FOR_extv][3]);
5754      extraction_mode = (insn_operand_mode[(int) CODE_FOR_extv][0] == VOIDmode
5755			 ? word_mode
5756			 : insn_operand_mode[(int) CODE_FOR_extv][0]);
5757    }
5758#endif
5759
5760  /* Never narrow an object, since that might not be safe.  */
5761
5762  if (mode != VOIDmode
5763      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5764    extraction_mode = mode;
5765
5766  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5767      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5768    pos_mode = GET_MODE (pos_rtx);
5769
5770  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5771     if we have to change the mode of memory and cannot, the desired mode is
5772     EXTRACTION_MODE.  */
5773  if (GET_CODE (inner) != MEM)
5774    wanted_inner_mode = wanted_inner_reg_mode;
5775  else if (inner_mode != wanted_inner_mode
5776	   && (mode_dependent_address_p (XEXP (inner, 0))
5777	       || MEM_VOLATILE_P (inner)))
5778    wanted_inner_mode = extraction_mode;
5779
5780  orig_pos = pos;
5781
5782  if (BITS_BIG_ENDIAN)
5783    {
5784      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5785	 BITS_BIG_ENDIAN style.  If position is constant, compute new
5786	 position.  Otherwise, build subtraction.
5787	 Note that POS is relative to the mode of the original argument.
5788	 If it's a MEM we need to recompute POS relative to that.
5789	 However, if we're extracting from (or inserting into) a register,
5790	 we want to recompute POS relative to wanted_inner_mode.  */
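      /* For instance, when extracting LEN == 8 bits from a 32-bit
	 register at little-endian POS == 4, the BITS_BIG_ENDIAN
	 position is 32 - 8 - 4 == 20.  */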
5791      int width = (GET_CODE (inner) == MEM
5792		   ? GET_MODE_BITSIZE (is_mode)
5793		   : GET_MODE_BITSIZE (wanted_inner_mode));
5794
5795      if (pos_rtx == 0)
5796	pos = width - len - pos;
5797      else
5798	pos_rtx
5799	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5800			     GEN_INT (width - len), pos_rtx);
5801      /* POS may be less than 0 now, but we check for that below.
5802	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
5803    }
5804
5805  /* If INNER has a wider mode, make it smaller.  If this is a constant
5806     extract, try to adjust the address to point to the byte containing
5807     the value.  */
5808  if (wanted_inner_mode != VOIDmode
5809      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
5810      && ((GET_CODE (inner) == MEM
5811	   && (inner_mode == wanted_inner_mode
5812	       || (! mode_dependent_address_p (XEXP (inner, 0))
5813		   && ! MEM_VOLATILE_P (inner))))))
5814    {
5815      int offset = 0;
5816
5817      /* The computations below will be correct if the machine is big
5818	 endian in both bits and bytes or little endian in bits and bytes.
5819	 If it is mixed, we must adjust.  */
5820
5821      /* If bytes are big endian and we had a paradoxical SUBREG, we must
5822	 adjust OFFSET to compensate.  */
5823      if (BYTES_BIG_ENDIAN
5824	  && ! spans_byte
5825	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5826	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5827
5828      /* If this is a constant position, we can move to the desired byte.  */
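      /* E.g., on a target with 8-bit units and an 8-bit
	 WANTED_INNER_MODE, a constant POS of 37 becomes byte offset 4
	 and bit position 5 within that byte.  */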
5829      if (pos_rtx == 0)
5830	{
5831	  offset += pos / BITS_PER_UNIT;
5832	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
5833	}
5834
5835      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5836	  && ! spans_byte
5837	  && is_mode != wanted_inner_mode)
5838	offset = (GET_MODE_SIZE (is_mode)
5839		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
5840
5841      if (offset != 0 || inner_mode != wanted_inner_mode)
5842	{
5843	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
5844				    plus_constant (XEXP (inner, 0), offset));
5845	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5846	  MEM_COPY_ATTRIBUTES (newmem, inner);
5847	  inner = newmem;
5848	}
5849    }
5850
5851  /* If INNER is not memory, we can always get it into the proper mode.  If we
5852     are changing its mode, POS must be a constant and smaller than the size
5853     of the new mode.  */
5854  else if (GET_CODE (inner) != MEM)
5855    {
5856      if (GET_MODE (inner) != wanted_inner_mode
5857	  && (pos_rtx != 0
5858	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5859	return 0;
5860
5861      inner = force_to_mode (inner, wanted_inner_mode,
5862			     pos_rtx
5863			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5864			     ? GET_MODE_MASK (wanted_inner_mode)
5865			     : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5866			     NULL_RTX, 0);
5867    }
5868
5869  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
5870     have to zero extend.  Otherwise, we can just use a SUBREG.  */
5871  if (pos_rtx != 0
5872      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5873    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5874  else if (pos_rtx != 0
5875	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5876    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5877
5878  /* Make POS_RTX unless we already have it and it is correct.  If we don't
5879     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5880     be a CONST_INT.  */
5881  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5882    pos_rtx = orig_pos_rtx;
5883
5884  else if (pos_rtx == 0)
5885    pos_rtx = GEN_INT (pos);
5886
5887  /* Make the required operation.  See if we can use existing rtx.  */
5888  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5889			 extraction_mode, inner, GEN_INT (len), pos_rtx);
5890  if (! in_dest)
5891    new = gen_lowpart_for_combine (mode, new);
5892
5893  return new;
5894}
5895
5896/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5897   with any other operations in X.  Return X without that shift if so.  */
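/* For example, with COUNT == 3, (plus (ashift X (const_int 3))
   (const_int 8)) yields (plus X (const_int 1)); shifting that result
   left by 3 bits reproduces the original value.  */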
5898
5899static rtx
5900extract_left_shift (x, count)
5901     rtx x;
5902     int count;
5903{
5904  enum rtx_code code = GET_CODE (x);
5905  enum machine_mode mode = GET_MODE (x);
5906  rtx tem;
5907
5908  switch (code)
5909    {
5910    case ASHIFT:
5911      /* This is the shift itself.  If it is wide enough, we will return
5912	 either the value being shifted if the shift count is equal to
5913	 COUNT or a shift for the difference.  */
5914      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5915	  && INTVAL (XEXP (x, 1)) >= count)
5916	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5917				     INTVAL (XEXP (x, 1)) - count);
5918      break;
5919
5920    case NEG:  case NOT:
5921      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5922	return gen_unary (code, mode, mode, tem);
5923
5924      break;
5925
5926    case PLUS:  case IOR:  case XOR:  case AND:
5927      /* If we can safely shift this constant and we find the inner shift,
5928	 make a new operation.  */
5929      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5930	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5931	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5932	return gen_binary (code, mode, tem,
5933			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5934
5935      break;
5936
5937    default:
5938      break;
5939    }
5940
5941  return 0;
5942}
5943
5944/* Look at the expression rooted at X.  Look for expressions
5945   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5946   Form these expressions.
5947
5948   Return the new rtx, usually just X.
5949
5950   Also, for machines like the Vax that don't have logical shift insns,
5951   try to convert logical to arithmetic shift operations in cases where
5952   they are equivalent.  This undoes the canonicalizations to logical
5953   shifts done elsewhere.
5954
5955   We try, as much as possible, to re-use rtl expressions to save memory.
5956
5957   IN_CODE says what kind of expression we are processing.  Normally, it is
5958   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
5959   being kludges), it is MEM.  When processing the arguments of a comparison
5960   or a COMPARE against zero, it is COMPARE.  */
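/* For example, (and (lshiftrt X (const_int 3)) (const_int 255)) is
   rewritten here as (zero_extract X (const_int 8) (const_int 3)),
   an extraction of the eight bits starting at bit 3.  */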
5961
5962static rtx
5963make_compound_operation (x, in_code)
5964     rtx x;
5965     enum rtx_code in_code;
5966{
5967  enum rtx_code code = GET_CODE (x);
5968  enum machine_mode mode = GET_MODE (x);
5969  int mode_width = GET_MODE_BITSIZE (mode);
5970  rtx rhs, lhs;
5971  enum rtx_code next_code;
5972  int i;
5973  rtx new = 0;
5974  rtx tem;
5975  char *fmt;
5976
5977  /* Select the code to be used in recursive calls.  Once we are inside an
5978     address, we stay there.  If we have a comparison, set to COMPARE,
5979     but once inside, go back to our default of SET.  */
5980
5981  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5982	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5983		  && XEXP (x, 1) == const0_rtx) ? COMPARE
5984	       : in_code == COMPARE ? SET : in_code);
5985
5986  /* Process depending on the code of this operation.  If NEW is set
5987     non-zero, it will be returned.  */
5988
5989  switch (code)
5990    {
5991    case ASHIFT:
5992      /* Convert shifts by constants into multiplications if inside
5993	 an address.  */
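      /* E.g., (ashift X (const_int 2)) seen inside a MEM becomes
	 (mult X (const_int 4)), the canonical form for addresses.  */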
5994      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5995	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5996	  && INTVAL (XEXP (x, 1)) >= 0)
5997	{
5998	  new = make_compound_operation (XEXP (x, 0), next_code);
5999	  new = gen_rtx_combine (MULT, mode, new,
6000				 GEN_INT ((HOST_WIDE_INT) 1
6001					  << INTVAL (XEXP (x, 1))));
6002	}
6003      break;
6004
6005    case AND:
6006      /* If the second operand is not a constant, we can't do anything
6007	 with it.  */
6008      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6009	break;
6010
6011      /* If the constant is a power of two minus one and the first operand
6012	 is a logical right shift, make an extraction.  */
6013      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6014	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6015	{
6016	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6017	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6018				 0, in_code == COMPARE);
6019	}
6020
6021      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6022      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6023	       && subreg_lowpart_p (XEXP (x, 0))
6024	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6025	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6026	{
6027	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6028					 next_code);
6029	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6030				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6031				 0, in_code == COMPARE);
6032	}
6033      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6034      else if ((GET_CODE (XEXP (x, 0)) == XOR
6035		|| GET_CODE (XEXP (x, 0)) == IOR)
6036	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6037	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6038	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6039	{
6040	  /* Apply the distributive law, and then try to make extractions.  */
6041	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
6042				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6043					      XEXP (x, 1)),
6044				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6045					      XEXP (x, 1)));
6046	  new = make_compound_operation (new, in_code);
6047	}
6048
6049      /* If we have (and (rotate X C) M) and C is larger than the number
6050	 of bits in M, this is an extraction.  */
6051
6052      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6053	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6054	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6055	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6056	{
6057	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6058	  new = make_extraction (mode, new,
6059				 (GET_MODE_BITSIZE (mode)
6060				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6061				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6062	}
6063
6064      /* On machines without logical shifts, if the operand of the AND is
6065	 a logical shift and our mask turns off all the propagated sign
6066	 bits, we can replace the logical shift with an arithmetic shift.  */
6067      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6068	       && (lshr_optab->handlers[(int) mode].insn_code
6069		   == CODE_FOR_nothing)
6070	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6071	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6072	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6073	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6074	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6075	{
6076	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6077
6078	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6079	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6080	    SUBST (XEXP (x, 0),
6081		   gen_rtx_combine (ASHIFTRT, mode,
6082				    make_compound_operation (XEXP (XEXP (x, 0), 0),
6083							     next_code),
6084				    XEXP (XEXP (x, 0), 1)));
6085	}
6086
6087      /* If the constant is one less than a power of two, this might be
6088	 representable by an extraction even if no shift is present.
6089	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6090	 we are in a COMPARE.  */
6091      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6092	new = make_extraction (mode,
6093			       make_compound_operation (XEXP (x, 0),
6094							next_code),
6095			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6096
6097      /* If we are in a comparison and this is an AND with a power of two,
6098	 convert this into the appropriate bit extract.  */
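      /* E.g., testing (and X (const_int 8)) against zero becomes a
	 test of (zero_extract X (const_int 1) (const_int 3)).  */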
6099      else if (in_code == COMPARE
6100	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6101	new = make_extraction (mode,
6102			       make_compound_operation (XEXP (x, 0),
6103							next_code),
6104			       i, NULL_RTX, 1, 1, 0, 1);
6105
6106      break;
6107
6108    case LSHIFTRT:
6109      /* If the sign bit is known to be zero, replace this with an
6110	 arithmetic shift.  */
6111      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6112	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6113	  && mode_width <= HOST_BITS_PER_WIDE_INT
6114	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6115	{
6116	  new = gen_rtx_combine (ASHIFTRT, mode,
6117				 make_compound_operation (XEXP (x, 0),
6118							  next_code),
6119				 XEXP (x, 1));
6120	  break;
6121	}
6122
6123      /* ... fall through ...  */
6124
6125    case ASHIFTRT:
6126      lhs = XEXP (x, 0);
6127      rhs = XEXP (x, 1);
6128
6129      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6130	 this is a SIGN_EXTRACT.  */
6131      if (GET_CODE (rhs) == CONST_INT
6132	  && GET_CODE (lhs) == ASHIFT
6133	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6134	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6135	{
6136	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6137	  new = make_extraction (mode, new,
6138				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6139				 NULL_RTX, mode_width - INTVAL (rhs),
6140				 code == LSHIFTRT, 0, in_code == COMPARE);
6141	}
6142
6143      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6144	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6145	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6146	 seem worth the effort; the case checked for occurs on Alpha.  */
6147
6148      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6149	  && ! (GET_CODE (lhs) == SUBREG
6150		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6151	  && GET_CODE (rhs) == CONST_INT
6152	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6153	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6154	new = make_extraction (mode, make_compound_operation (new, next_code),
6155			       0, NULL_RTX, mode_width - INTVAL (rhs),
6156			       code == LSHIFTRT, 0, in_code == COMPARE);
6157
6158      break;
6159
6160    case SUBREG:
6161      /* Call ourselves recursively on the inner expression.  If we are
6162	 narrowing the object and its RTL code differs from what it
6163	 originally was, do this SUBREG as a force_to_mode.  */
6164
6165      tem = make_compound_operation (SUBREG_REG (x), in_code);
6166      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6167	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6168	  && subreg_lowpart_p (x))
6169	{
6170	  rtx newer = force_to_mode (tem, mode,
6171				     GET_MODE_MASK (mode), NULL_RTX, 0);
6172
6173	  /* If we have something other than a SUBREG, we might have
6174	     done an expansion, so rerun ourselves.  */
6175	  if (GET_CODE (newer) != SUBREG)
6176	    newer = make_compound_operation (newer, in_code);
6177
6178	  return newer;
6179	}
6180
6181      /* If this is a paradoxical subreg, and the new code is a sign or
6182	 zero extension, omit the subreg and widen the extension.  If it
6183	 is a regular subreg, we can still get rid of the subreg by not
6184	 widening so much, or in fact removing the extension entirely.  */
6185      if ((GET_CODE (tem) == SIGN_EXTEND
6186	   || GET_CODE (tem) == ZERO_EXTEND)
6187	  && subreg_lowpart_p (x))
6188	{
6189	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6190	      || (GET_MODE_SIZE (mode) >
6191		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6192	    tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6193	  else
6194	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6195	  return tem;
6196	}
6197      break;
6198
6199    default:
6200      break;
6201    }
6202
6203  if (new)
6204    {
6205      x = gen_lowpart_for_combine (mode, new);
6206      code = GET_CODE (x);
6207    }
6208
6209  /* Now recursively process each operand of this operation.  */
6210  fmt = GET_RTX_FORMAT (code);
6211  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6212    if (fmt[i] == 'e')
6213      {
6214	new = make_compound_operation (XEXP (x, i), next_code);
6215	SUBST (XEXP (x, i), new);
6216      }
6217
6218  return x;
6219}
6220
6221/* Given M, see if it is a value that would select a field of bits
6222   within an item, but not the entire word.  Return -1 if not.
6223   Otherwise, return the starting position of the field, where 0 is the
6224   low-order bit.
6225
6226   *PLEN is set to the length of the field.  */
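/* For example, M == 0x0f0 selects a 4-bit field starting at bit 4, so
   we return 4 and set *PLEN to 4; M == 0x0f1 does not select a
   contiguous field and yields -1.  */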
6227
6228static int
6229get_pos_from_mask (m, plen)
6230     unsigned HOST_WIDE_INT m;
6231     int *plen;
6232{
6233  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6234  int pos = exact_log2 (m & - m);
6235
6236  if (pos < 0)
6237    return -1;
6238
6239  /* Now shift off the low-order zero bits and see if we have a power of
6240     two minus 1.  */
6241  *plen = exact_log2 ((m >> pos) + 1);
6242
6243  if (*plen <= 0)
6244    return -1;
6245
6246  return pos;
6247}
6248
6249/* See if X can be simplified knowing that we will only refer to it in
6250   MODE and will only refer to those bits that are nonzero in MASK.
6251   If other bits are being computed or if masking operations are done
6252   that select a superset of the bits in MASK, they can sometimes be
6253   ignored.
6254
6255   Return a possibly simplified expression, but always convert X to
6256   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6257
6258   Also, if REG is non-zero and X is a register equal in value to REG,
6259   replace X with REG.
6260
6261   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6262   are all off in X.  This is used when X will be complemented, by either
6263   NOT, NEG, or XOR.  */
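/* For example, with MASK == 15, (and X (const_int 255)) simplifies to
   just X here, converted to MODE, since the outer context will only
   look at the low four bits anyway.  */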
6264
6265static rtx
6266force_to_mode (x, mode, mask, reg, just_select)
6267     rtx x;
6268     enum machine_mode mode;
6269     unsigned HOST_WIDE_INT mask;
6270     rtx reg;
6271     int just_select;
6272{
6273  enum rtx_code code = GET_CODE (x);
6274  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6275  enum machine_mode op_mode;
6276  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6277  rtx op0, op1, temp;
6278
6279  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6280     code below will do the wrong thing since the mode of such an
6281     expression is VOIDmode.
6282
6283     Also do nothing if X is a CLOBBER; this can happen if X was
6284     the return value from a call to gen_lowpart_for_combine.  */
6285  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6286    return x;
6287
6288  /* We want to perform the operation in its present mode unless we know
6289     that the operation is valid in MODE, in which case we do the operation
6290     in MODE.  */
6291  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6292	      && code_to_optab[(int) code] != 0
6293	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6294		  != CODE_FOR_nothing))
6295	     ? mode : GET_MODE (x));
6296
6297  /* It is not valid to do a right-shift in a narrower mode
6298     than the one it came in with.  */
6299  if ((code == LSHIFTRT || code == ASHIFTRT)
6300      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6301    op_mode = GET_MODE (x);
6302
6303  /* Truncate MASK to fit OP_MODE.  */
6304  if (op_mode)
6305    mask &= GET_MODE_MASK (op_mode);
6306
6307  /* When we have an arithmetic operation, or a shift whose count we
6308     do not know, we need to assume that all bits up to the highest-order
6309     bit in MASK will be needed.  This is how we form such a mask.  */
6310  if (op_mode)
6311    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6312		   ? GET_MODE_MASK (op_mode)
6313		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6314  else
6315    fuller_mask = ~ (HOST_WIDE_INT) 0;
6316
6317  /* Determine what bits of X are guaranteed to be (non)zero.  */
6318  nonzero = nonzero_bits (x, mode);
6319
6320  /* If none of the bits in X are needed, return a zero.  */
6321  if (! just_select && (nonzero & mask) == 0)
6322    return const0_rtx;
6323
6324  /* If X is a CONST_INT, return a new one.  Do this here since the
6325     test below will fail.  */
6326  if (GET_CODE (x) == CONST_INT)
6327    {
6328      HOST_WIDE_INT cval = INTVAL (x) & mask;
6329      int width = GET_MODE_BITSIZE (mode);
6330
6331      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6332	 number, sign extend it.  */
6333      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6334	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6335	cval |= (HOST_WIDE_INT) -1 << width;
6336
6337      return GEN_INT (cval);
6338    }
6339
6340  /* If X is narrower than MODE and we want all the bits in X's mode, just
6341     get X in the proper mode.  */
6342  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6343      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
6344    return gen_lowpart_for_combine (mode, x);
6345
6346  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6347     MASK are already known to be zero in X, we need not do anything.  */
6348  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6349    return x;
6350
6351  switch (code)
6352    {
6353    case CLOBBER:
6354      /* If X is a (clobber (const_int)), return it since we know we are
6355	 generating something that won't match.  */
6356      return x;
6357
6358    case USE:
6359      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6360	 spanned the boundary of the MEM.  If we are now masking so it is
6361	 within that boundary, we don't need the USE any more.  */
6362      if (! BITS_BIG_ENDIAN
6363	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6364	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6365      break;
6366
6367    case SIGN_EXTEND:
6368    case ZERO_EXTEND:
6369    case ZERO_EXTRACT:
6370    case SIGN_EXTRACT:
6371      x = expand_compound_operation (x);
6372      if (GET_CODE (x) != code)
6373	return force_to_mode (x, mode, mask, reg, next_select);
6374      break;
6375
6376    case REG:
6377      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6378		       || rtx_equal_p (reg, get_last_value (x))))
6379	x = reg;
6380      break;
6381
6382    case SUBREG:
6383      if (subreg_lowpart_p (x)
6384	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6385	     if the constant masks to zero all the bits the mode doesn't
6386	     have.  */
6387	  && ((GET_MODE_SIZE (GET_MODE (x))
6388	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6389	      || (0 == (mask
6390			& GET_MODE_MASK (GET_MODE (x))
6391			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6392	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6393      break;
6394
6395    case AND:
6396      /* If this is an AND with a constant, convert it into an AND
6397	 whose constant is the AND of that constant with MASK.  If it
6398	 remains an AND of MASK, delete it since it is redundant.  */
6399
6400      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6401	{
6402	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6403				      mask & INTVAL (XEXP (x, 1)));
6404
6405	  /* If X is still an AND, see if it is an AND with a mask that
6406	     is just some low-order bits.  If so, and it is MASK, we don't
6407	     need it.  */
6408
6409	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6410	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
6411	    x = XEXP (x, 0);
6412
6413	  /* If it remains an AND, try making another AND with the bits
6414	     in the mode mask that aren't in MASK turned on.  If the
6415	     constant in the AND is wide enough, this might make a
6416	     cheaper constant.  */
6417
6418	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6419	      && GET_MODE_MASK (GET_MODE (x)) != mask
6420	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6421	    {
6422	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6423				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6424	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6425	      rtx y;
6426
6427	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6428		 number, sign extend it.  */
6429	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6430		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6431		cval |= (HOST_WIDE_INT) -1 << width;
6432
6433	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6434	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6435		x = y;
6436	    }
6437
6438	  break;
6439	}
6440
6441      goto binop;
6442
6443    case PLUS:
6444      /* In (and (plus FOO C1) M), if M is a mask that just turns off
6445	 low-order bits (as in an alignment operation) and FOO is already
6446	 aligned to that boundary, mask C1 to that boundary as well.
6447	 This may eliminate that PLUS and, later, the AND.  */
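      /* E.g., if FOO is known to be 4-byte aligned and M is -4,
	 (plus FOO (const_int 7)) can become (plus FOO (const_int 4)).  */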
6448
6449      {
6450	int width = GET_MODE_BITSIZE (mode);
6451	unsigned HOST_WIDE_INT smask = mask;
6452
6453	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6454	   number, sign extend it.  */
6455
6456	if (width < HOST_BITS_PER_WIDE_INT
6457	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6458	  smask |= (HOST_WIDE_INT) -1 << width;
6459
6460	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6461	    && exact_log2 (- smask) >= 0)
6462	  {
6463#ifdef STACK_BIAS
6464	    if (STACK_BIAS
6465	        && (XEXP (x, 0) == stack_pointer_rtx
6466	            || XEXP (x, 0) == frame_pointer_rtx))
6467	      {
6468                int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6469                unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6470
6471		sp_mask &= ~ (sp_alignment - 1);
6472		if ((sp_mask & ~ smask) == 0
6473		    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ smask) != 0)
6474		  return force_to_mode (plus_constant (XEXP (x, 0),
6475		  				       ((INTVAL (XEXP (x, 1)) -
6476							 STACK_BIAS) & smask)
6477						       + STACK_BIAS),
6478		 			mode, smask, reg, next_select);
6479              }
6480#endif
6481	    if ((nonzero_bits (XEXP (x, 0), mode) & ~ smask) == 0
6482	        && (INTVAL (XEXP (x, 1)) & ~ smask) != 0)
6483	      return force_to_mode (plus_constant (XEXP (x, 0),
6484					           (INTVAL (XEXP (x, 1))
6485						    & smask)),
6486				    mode, smask, reg, next_select);
6487	  }
6488      }
6489
6490      /* ... fall through ...  */
6491
6492    case MINUS:
6493    case MULT:
6494      /* For PLUS, MINUS and MULT, we need any bits less significant than the
6495	 most significant bit in MASK since carries from those bits will
6496	 affect the bits we are interested in.  */
6497      mask = fuller_mask;
6498      goto binop;
6499
6500    case IOR:
6501    case XOR:
6502      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6503	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6504	 operation which may be a bitfield extraction.  Ensure that the
6505	 constant we form is not wider than the mode of X.  */
6506
6507      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6508	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6509	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6510	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6511	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6512	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
6513	       + floor_log2 (INTVAL (XEXP (x, 1))))
6514	      < GET_MODE_BITSIZE (GET_MODE (x)))
6515	  && (INTVAL (XEXP (x, 1))
6516	      & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6517	{
6518	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6519			      << INTVAL (XEXP (XEXP (x, 0), 1)));
6520	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
6521			     XEXP (XEXP (x, 0), 0), temp);
6522	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6523			  XEXP (XEXP (x, 0), 1));
6524	  return force_to_mode (x, mode, mask, reg, next_select);
6525	}
6526
6527    binop:
6528      /* For most binary operations, just propagate into the operation and
6529	 change the mode if we have an operation of that mode.   */
6530
6531      op0 = gen_lowpart_for_combine (op_mode,
6532				     force_to_mode (XEXP (x, 0), mode, mask,
6533						    reg, next_select));
6534      op1 = gen_lowpart_for_combine (op_mode,
6535				     force_to_mode (XEXP (x, 1), mode, mask,
6536						    reg, next_select));
6537
6538      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6539	 MASK since OP1 might have been sign-extended but we never want
6540	 to turn on extra bits, since combine might have previously relied
6541	 on them being off.  */
6542      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6543	  && (INTVAL (op1) & mask) != 0)
6544	op1 = GEN_INT (INTVAL (op1) & mask);
6545
6546      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6547	x = gen_binary (code, op_mode, op0, op1);
6548      break;
6549
6550    case ASHIFT:
6551      /* For left shifts, do the same, but just for the first operand.
6552	 However, we cannot do anything with shifts where we cannot
6553	 guarantee that the counts are smaller than the size of the mode
6554	 because such a count will have a different meaning in a
6555	 wider mode.  */
6556
6557      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6558	     && INTVAL (XEXP (x, 1)) >= 0
6559	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6560	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6561		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6562		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6563	break;
6564
6565      /* If the shift count is a constant and we can do arithmetic in
6566	 the mode of the shift, refine which bits we need.  Otherwise, use the
6567	 conservative form of the mask.  */
6568      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6569	  && INTVAL (XEXP (x, 1)) >= 0
6570	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6571	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6572	mask >>= INTVAL (XEXP (x, 1));
6573      else
6574	mask = fuller_mask;
6575
6576      op0 = gen_lowpart_for_combine (op_mode,
6577				     force_to_mode (XEXP (x, 0), op_mode,
6578						    mask, reg, next_select));
6579
6580      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6581	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6582      break;
6583
6584    case LSHIFTRT:
6585      /* Here we can only do something if the shift count is a constant,
6586	 this shift constant is valid for the host, and we can do arithmetic
6587	 in OP_MODE.  */
6588
6589      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6590	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6591	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6592	{
6593	  rtx inner = XEXP (x, 0);
6594
6595	  /* Select the mask of the bits we need for the shift operand.  */
6596	  mask <<= INTVAL (XEXP (x, 1));
6597
6598	  /* We can only change the mode of the shift if we can do arithmetic
6599	     in the mode of the shift and MASK is no wider than the width of
6600	     OP_MODE.  */
6601	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6602	      || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
6603	    op_mode = GET_MODE (x);
6604
6605	  inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6606
6607	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6608	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
6609	}
6610
6611      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6612	 shift and AND produces only copies of the sign bit (C2 is one less
6613	 than a power of two), we can do this with just a shift.  */
6614
6615      if (GET_CODE (x) == LSHIFTRT
6616	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6617	  && ((INTVAL (XEXP (x, 1))
6618	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6619	      >= GET_MODE_BITSIZE (GET_MODE (x)))
6620	  && exact_log2 (mask + 1) >= 0
6621	  && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6622	      >= exact_log2 (mask + 1)))
6623	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6624			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6625				 - exact_log2 (mask + 1)));
6626      break;
6627
6628    case ASHIFTRT:
6629      /* If we are just looking for the sign bit, we don't need this shift at
6630	 all, even if it has a variable count.  */
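      /* E.g., in SImode with MASK == 0x80000000, the sign bit of
	 (ashiftrt X C) is the sign bit of X for any count C, so X
	 itself suffices.  */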
6631      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6632	  && (mask == ((unsigned HOST_WIDE_INT) 1
6633		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6634	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6635
6636      /* If this is a shift by a constant, get a mask that contains those bits
6637	 that are not copies of the sign bit.  We then have two cases:  If
6638	 MASK only includes those bits, this can be a logical shift, which may
6639	 allow simplifications.  If MASK is a single-bit field not within
6640	 those bits, we are requesting a copy of the sign bit and hence can
6641	 shift the sign bit to the appropriate location.  */
6642
6643      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6644	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6645	{
6646	  int i = -1;
6647
6648	  /* If the considered data is wider than HOST_WIDE_INT, we can't
6649	     represent a mask for all its bits in a single scalar.
6650	     But we only care about the lower bits, so calculate these.  */
6651
6652	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6653	    {
6654	      nonzero = ~ (HOST_WIDE_INT) 0;
6655
6656	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6657		 is the number of bits a full-width mask would have set.
6658		 We need only shift if these are fewer than nonzero can
6659		 hold.  If not, we must keep all bits set in nonzero.  */
6660
6661	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6662		  < HOST_BITS_PER_WIDE_INT)
6663		nonzero >>= (INTVAL (XEXP (x, 1))
6664			     + HOST_BITS_PER_WIDE_INT
6665			     - GET_MODE_BITSIZE (GET_MODE (x)));
6666	    }
6667	  else
6668	    {
6669	      nonzero = GET_MODE_MASK (GET_MODE (x));
6670	      nonzero >>= INTVAL (XEXP (x, 1));
6671	    }
6672
6673	  if ((mask & ~ nonzero) == 0
6674	      || (i = exact_log2 (mask)) >= 0)
6675	    {
6676	      x = simplify_shift_const
6677		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6678		 i < 0 ? INTVAL (XEXP (x, 1))
6679		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6680
6681	      if (GET_CODE (x) != ASHIFTRT)
6682		return force_to_mode (x, mode, mask, reg, next_select);
6683	    }
6684	}
6685
6686      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
6687	 even if the shift count isn't a constant.  */
6688      if (mask == 1)
6689	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6690
6691      /* If this is a sign-extension operation that just affects bits
6692	 we don't care about, remove it.  Be sure the call above returned
6693	 something that is still a shift.  */
6694
6695      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6696	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6697	  && INTVAL (XEXP (x, 1)) >= 0
6698	  && (INTVAL (XEXP (x, 1))
6699	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6700	  && GET_CODE (XEXP (x, 0)) == ASHIFT
6701	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6702	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6703	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6704			      reg, next_select);
6705
6706      break;
6707
6708    case ROTATE:
6709    case ROTATERT:
6710      /* If the shift count is constant and we can do computations
6711	 in the mode of X, compute where the bits we care about are.
6712	 Otherwise, we can't do anything.  Don't change the mode of
6713	 the shift or propagate MODE into the shift, though.  */
6714      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6715	  && INTVAL (XEXP (x, 1)) >= 0)
6716	{
6717	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6718					    GET_MODE (x), GEN_INT (mask),
6719					    XEXP (x, 1));
6720	  if (temp && GET_CODE (temp) == CONST_INT)
6721	    SUBST (XEXP (x, 0),
6722		   force_to_mode (XEXP (x, 0), GET_MODE (x),
6723				  INTVAL (temp), reg, next_select));
6724	}
6725      break;
6726
6727    case NEG:
6728      /* If we just want the low-order bit, the NEG isn't needed since it
6729	 won't change the low-order bit.  */
6730      if (mask == 1)
6731	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6732
6733      /* We need any bits less significant than the most significant bit in
6734	 MASK since carries from those bits will affect the bits we are
6735	 interested in.  */
6736      mask = fuller_mask;
6737      goto unop;
6738
6739    case NOT:
6740      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6741	 same as the XOR case above.  Ensure that the constant we form is not
6742	 wider than the mode of X.  */
6743
6744      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6745	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6746	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6747	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6748	      < GET_MODE_BITSIZE (GET_MODE (x)))
6749	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6750	{
6751	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6752	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6753	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6754
6755	  return force_to_mode (x, mode, mask, reg, next_select);
6756	}
6757
6758      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6759	 use the full mask inside the NOT.  */
6760      mask = fuller_mask;
6761
6762    unop:
6763      op0 = gen_lowpart_for_combine (op_mode,
6764				     force_to_mode (XEXP (x, 0), mode, mask,
6765						    reg, next_select));
6766      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6767	x = gen_unary (code, op_mode, op_mode, op0);
6768      break;
6769
6770    case NE:
6771      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6772	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
6773	 which is equal to STORE_FLAG_VALUE.  */
6774      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6775	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
6776	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
6777	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6778
6779      break;
6780
6781    case IF_THEN_ELSE:
6782      /* We have no way of knowing if the IF_THEN_ELSE can itself be
6783	 written in a narrower mode.  We play it safe and do not do so.  */
6784
6785      SUBST (XEXP (x, 1),
6786	     gen_lowpart_for_combine (GET_MODE (x),
6787				      force_to_mode (XEXP (x, 1), mode,
6788						     mask, reg, next_select)));
6789      SUBST (XEXP (x, 2),
6790	     gen_lowpart_for_combine (GET_MODE (x),
6791				      force_to_mode (XEXP (x, 2), mode,
6792						     mask, reg, next_select)));
6793      break;
6794
6795    default:
6796      break;
6797    }
6798
6799  /* Ensure we return a value of the proper mode.  */
6800  return gen_lowpart_for_combine (mode, x);
6801}
6802
6803/* Return nonzero if X is an expression that has one of two values depending on
6804   whether some other value is zero or nonzero.  In that case, we return the
6805   value that is being tested, *PTRUE is set to the value if the rtx being
6806   returned has a nonzero value, and *PFALSE is set to the other alternative.
6807
6808   If we return zero, we set *PTRUE and *PFALSE to X.  */
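/* For example, given (if_then_else (ne A (const_int 0)) B C), we
   return A with *PTRUE set to B and *PFALSE set to C, since the NE
   against zero is canonicalized away.  */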
6809
6810static rtx
6811if_then_else_cond (x, ptrue, pfalse)
6812     rtx x;
6813     rtx *ptrue, *pfalse;
6814{
6815  enum machine_mode mode = GET_MODE (x);
6816  enum rtx_code code = GET_CODE (x);
6817  int size = GET_MODE_BITSIZE (mode);
6818  rtx cond0, cond1, true0, true1, false0, false1;
6819  unsigned HOST_WIDE_INT nz;
6820
6821  /* If this is a unary operation whose operand has one of two values, apply
6822     our opcode to compute those values.  */
6823  if (GET_RTX_CLASS (code) == '1'
6824      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6825    {
6826      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6827      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6828      return cond0;
6829    }
6830
6831  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6832     make can't possibly match and would suppress other optimizations.  */
6833  else if (code == COMPARE)
6834    ;
6835
6836  /* If this is a binary operation, see if either side has only one of two
6837     values.  If either one does or if both do and they are conditional on
6838     the same value, compute the new true and false values.  */
6839  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6840	   || GET_RTX_CLASS (code) == '<')
6841    {
6842      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6843      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6844
6845      if ((cond0 != 0 || cond1 != 0)
6846	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6847	{
6848	  /* If if_then_else_cond returned zero, then true/false are the
6849	     same rtl.  We must copy one of them to prevent invalid rtl
6850	     sharing.  */
6851	  if (cond0 == 0)
6852	    true0 = copy_rtx (true0);
6853	  else if (cond1 == 0)
6854	    true1 = copy_rtx (true1);
6855
6856	  *ptrue = gen_binary (code, mode, true0, true1);
6857	  *pfalse = gen_binary (code, mode, false0, false1);
6858	  return cond0 ? cond0 : cond1;
6859	}
6860
6861      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6862	 operands is zero when the other is non-zero, and vice-versa,
6863	 and STORE_FLAG_VALUE is 1 or -1.  */
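      /* E.g., with STORE_FLAG_VALUE == 1, (plus (mult (lt A B) X)
	 (mult (ge A B) Y)) evaluates to X when (lt A B) holds and to Y
	 otherwise, since exactly one of the two MULTs is nonzero.  */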
6864
6865      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6866	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
6867	      || code == UMAX)
6868	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6869	{
6870	  rtx op0 = XEXP (XEXP (x, 0), 1);
6871	  rtx op1 = XEXP (XEXP (x, 1), 1);
6872
6873	  cond0 = XEXP (XEXP (x, 0), 0);
6874	  cond1 = XEXP (XEXP (x, 1), 0);
6875
6876	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6877	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6878	      && reversible_comparison_p (cond1)
6879	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6880		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6881		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6882		  || ((swap_condition (GET_CODE (cond0))
6883		       == reverse_condition (GET_CODE (cond1)))
6884		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6885		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6886	      && ! side_effects_p (x))
6887	    {
6888	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6889	      *pfalse = gen_binary (MULT, mode,
6890				    (code == MINUS
6891				     ? gen_unary (NEG, mode, mode, op1) : op1),
6892				    const_true_rtx);
6893	      return cond0;
6894	    }
6895	}
6896
6897      /* Similarly for MULT, AND and UMIN, except that for these the result
6898	 is always zero.  */
6899      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6900	  && (code == MULT || code == AND || code == UMIN)
6901	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6902	{
6903	  cond0 = XEXP (XEXP (x, 0), 0);
6904	  cond1 = XEXP (XEXP (x, 1), 0);
6905
6906	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6907	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6908	      && reversible_comparison_p (cond1)
6909	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6910		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6911		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6912		  || ((swap_condition (GET_CODE (cond0))
6913		       == reverse_condition (GET_CODE (cond1)))
6914		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6915		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6916	      && ! side_effects_p (x))
6917	    {
6918	      *ptrue = *pfalse = const0_rtx;
6919	      return cond0;
6920	    }
6921	}
6922    }
6923
6924  else if (code == IF_THEN_ELSE)
6925    {
6926      /* If we have IF_THEN_ELSE already, extract the condition and
6927	 canonicalize it if it is NE or EQ.  */
6928      cond0 = XEXP (x, 0);
6929      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6930      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6931	return XEXP (cond0, 0);
6932      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6933	{
6934	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6935	  return XEXP (cond0, 0);
6936	}
6937      else
6938	return cond0;
6939    }
6940
6941  /* If X is a normal SUBREG with both inner and outer modes integral,
6942     we can narrow both the true and false values of the inner expression,
6943     if there is a condition.  */
6944  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6945	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6946	   && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6947	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6948					       &true0, &false0)))
6949    {
6950      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6951      *pfalse
6952	= force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6953
6954      return cond0;
6955    }
6956
6957  /* If X is a constant, this isn't special and will cause confusion
6958     if we treat it as such.  Likewise if it is equivalent to a constant.  */
6959  else if (CONSTANT_P (x)
6960	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6961    ;
6962
6963  /* If X is known to be either 0 or -1, those are the true and
6964     false values when testing X.  */
6965  else if (num_sign_bit_copies (x, mode) == size)
6966    {
6967      *ptrue = constm1_rtx, *pfalse = const0_rtx;
6968      return x;
6969    }
6970
6971  /* Likewise for 0 or a single bit.  */
6972  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6973    {
6974      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6975      return x;
6976    }
6977
6978  /* Otherwise fail; show no condition with true and false values the same.  */
6979  *ptrue = *pfalse = x;
6980  return 0;
6981}
6982
6983/* Return the value of expression X given the fact that condition COND
6984   is known to be true when applied to REG as its first operand and VAL
6985   as its second.  X is known to not be shared and so can be modified in
6986   place.
6987
6988   We only handle the simplest cases, and specifically those cases that
6989   arise with IF_THEN_ELSE expressions.  */
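/* For example, if COND is GE, REG is A and VAL is (const_int 0), then
   (abs A) within X simplifies to A, as does (smax A (const_int 0)).  */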
6990
6991static rtx
6992known_cond (x, cond, reg, val)
6993     rtx x;
6994     enum rtx_code cond;
6995     rtx reg, val;
6996{
6997  enum rtx_code code = GET_CODE (x);
6998  rtx temp;
6999  char *fmt;
7000  int i, j;
7001
7002  if (side_effects_p (x))
7003    return x;
7004
7005  if (cond == EQ && rtx_equal_p (x, reg))
7006    return val;
7007
7008  /* If X is (abs REG) and we know something about REG's relationship
7009     with zero, we may be able to simplify this.  */
7010
7011  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7012    switch (cond)
7013      {
7014      case GE:  case GT:  case EQ:
7015	return XEXP (x, 0);
7016      case LT:  case LE:
7017	return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
7018			  XEXP (x, 0));
7019      default:
7020	break;
7021      }
7022
7023  /* The only other cases we handle are MIN, MAX, and comparisons if the
7024     operands are the same as REG and VAL.  */
7025
7026  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7027    {
7028      if (rtx_equal_p (XEXP (x, 0), val))
7029	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7030
7031      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7032	{
7033	  if (GET_RTX_CLASS (code) == '<')
7034	    return (comparison_dominates_p (cond, code) ? const_true_rtx
7035		    : (comparison_dominates_p (cond,
7036					       reverse_condition (code))
7037		       ? const0_rtx : x));
7038
7039	  else if (code == SMAX || code == SMIN
7040		   || code == UMIN || code == UMAX)
7041	    {
7042	      int unsignedp = (code == UMIN || code == UMAX);
7043
7044	      if (code == SMAX || code == UMAX)
7045		cond = reverse_condition (cond);
7046
7047	      switch (cond)
7048		{
7049		case GE:   case GT:
7050		  return unsignedp ? x : XEXP (x, 1);
7051		case LE:   case LT:
7052		  return unsignedp ? x : XEXP (x, 0);
7053		case GEU:  case GTU:
7054		  return unsignedp ? XEXP (x, 1) : x;
7055		case LEU:  case LTU:
7056		  return unsignedp ? XEXP (x, 0) : x;
7057		default:
7058		  break;
7059		}
7060	    }
7061	}
7062    }
7063
7064  fmt = GET_RTX_FORMAT (code);
7065  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7066    {
7067      if (fmt[i] == 'e')
7068	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7069      else if (fmt[i] == 'E')
7070	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7071	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7072						cond, reg, val));
7073    }
7074
7075  return x;
7076}
7077
7078/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7079   assignment as a field assignment.  */
7080
7081static int
7082rtx_equal_for_field_assignment_p (x, y)
7083     rtx x;
7084     rtx y;
7085{
7086  if (x == y || rtx_equal_p (x, y))
7087    return 1;
7088
7089  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7090    return 0;
7091
7092  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7093     Note that all SUBREGs of MEM are paradoxical; otherwise they
7094     would have been rewritten.  */
7095  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7096      && GET_CODE (SUBREG_REG (y)) == MEM
7097      && rtx_equal_p (SUBREG_REG (y),
7098		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7099    return 1;
7100
7101  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7102      && GET_CODE (SUBREG_REG (x)) == MEM
7103      && rtx_equal_p (SUBREG_REG (x),
7104		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7105    return 1;
7106
7107  /* We used to see if get_last_value of X and Y were the same but that's
7108     not correct.  In one direction, we'll cause the assignment to have
7109     the wrong destination and in the other, we'll import a register into this
7110     insn that might already have been dead.  So fail if none of the
7111     above cases are true.  */
7112  return 0;
7113}
7114
7115/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7116   Return that assignment if so.
7117
7118   We only handle the most common cases.  */
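/* For example, (set A (ior (ashift (const_int 1) B) A)) sets a single
   bit of A and is rewritten as (set (zero_extract A (const_int 1) B)
   (const_int 1)).  */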
7119
7120static rtx
7121make_field_assignment (x)
7122     rtx x;
7123{
7124  rtx dest = SET_DEST (x);
7125  rtx src = SET_SRC (x);
7126  rtx assign;
7127  rtx rhs, lhs;
7128  HOST_WIDE_INT c1;
7129  int pos, len;
7130  rtx other;
7131  enum machine_mode mode;
7132
7133  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7134     a clear of a one-bit field.  We will have changed it to
7135     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7136     for a SUBREG.  */
7137
7138  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7139      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7140      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7141      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7142    {
7143      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7144				1, 1, 1, 0);
7145      if (assign != 0)
7146	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7147      return x;
7148    }
7149
7150  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7151	   && subreg_lowpart_p (XEXP (src, 0))
7152	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7153	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7154	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7155	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7156	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7157    {
7158      assign = make_extraction (VOIDmode, dest, 0,
7159				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7160				1, 1, 1, 0);
7161      if (assign != 0)
7162	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7163      return x;
7164    }
7165
7166  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7167     one-bit field.  */
7168  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7169	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7170	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7171    {
7172      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7173				1, 1, 1, 0);
7174      if (assign != 0)
7175	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7176      return x;
7177    }
7178
7179  /* The other case we handle is assignments into a constant-position
7180     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7181     a mask that has all one bits except for a group of zero bits and
7182     OTHER is known to have zeros where C1 has ones, this is such an
7183     assignment.  Compute the position and length from C1.  Shift OTHER
7184     to the appropriate position, force it to the required mode, and
7185     make the extraction.  Check for the AND in both operands.  */
7186
7187  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7188    return x;
7189
7190  rhs = expand_compound_operation (XEXP (src, 0));
7191  lhs = expand_compound_operation (XEXP (src, 1));
7192
7193  if (GET_CODE (rhs) == AND
7194      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7195      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7196    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7197  else if (GET_CODE (lhs) == AND
7198	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7199	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7200    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7201  else
7202    return x;
7203
7204  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7205  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7206      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7207      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7208    return x;
7209
7210  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7211  if (assign == 0)
7212    return x;
7213
7214  /* The mode to use for the source is the mode of the assignment, or of
7215     what is inside a possible STRICT_LOW_PART.  */
7216  mode = (GET_CODE (assign) == STRICT_LOW_PART
7217	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7218
7219  /* Shift OTHER right POS places and make it the source, restricting it
7220     to the proper length and mode.  */
7221
7222  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7223					     GET_MODE (src), other, pos),
7224		       mode,
7225		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7226		       ? GET_MODE_MASK (mode)
7227		       : ((HOST_WIDE_INT) 1 << len) - 1,
7228		       dest, 0);
7229
7230  return gen_rtx_combine (SET, VOIDmode, assign, src);
7231}
7232
7233/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7234   if so.  */
7235
7236static rtx
7237apply_distributive_law (x)
7238     rtx x;
7239{
7240  enum rtx_code code = GET_CODE (x);
7241  rtx lhs, rhs, other;
7242  rtx tem;
7243  enum rtx_code inner_code;
7244
7245  /* Distributivity is not true for floating point.
7246     It can change the value.  So don't do it.
7247     -- rms and moshier@world.std.com.  */
7248  if (FLOAT_MODE_P (GET_MODE (x)))
7249    return x;
7250
7251  /* The outer operation can only be one of the following:  */
7252  if (code != IOR && code != AND && code != XOR
7253      && code != PLUS && code != MINUS)
7254    return x;
7255
7256  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7257
7258  /* If either operand is a primitive we can't do anything, so get out
7259     fast.  */
7260  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7261      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7262    return x;
7263
7264  lhs = expand_compound_operation (lhs);
7265  rhs = expand_compound_operation (rhs);
7266  inner_code = GET_CODE (lhs);
7267  if (inner_code != GET_CODE (rhs))
7268    return x;
7269
7270  /* See if the inner and outer operations distribute.  */
7271  switch (inner_code)
7272    {
7273    case LSHIFTRT:
7274    case ASHIFTRT:
7275    case AND:
7276    case IOR:
7277      /* These all distribute except over PLUS.  */
7278      if (code == PLUS || code == MINUS)
7279	return x;
7280      break;
7281
7282    case MULT:
7283      if (code != PLUS && code != MINUS)
7284	return x;
7285      break;
7286
7287    case ASHIFT:
7288      /* This is also a multiply, so it distributes over everything.  */
7289      break;
7290
7291    case SUBREG:
7292      /* Non-paradoxical SUBREGs distribute over all operations, provided
7293	 the inner modes and word numbers are the same, this is an extraction
7294	 of a low-order part, we don't convert an fp operation to int or
7295	 vice versa, and we would not be converting a single-word
7296	 operation into a multi-word operation.  The latter test is not
7297	 required, but it prevents generating unneeded multi-word operations.
7298	 Some of the previous tests are redundant given the latter test, but
7299	 are retained because they are required for correctness.
7300
7301	 We produce the result slightly differently in this case.  */
7302
7303      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7304	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7305	  || ! subreg_lowpart_p (lhs)
7306	  || (GET_MODE_CLASS (GET_MODE (lhs))
7307	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7308	  || (GET_MODE_SIZE (GET_MODE (lhs))
7309	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7310	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7311	return x;
7312
7313      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7314			SUBREG_REG (lhs), SUBREG_REG (rhs));
7315      return gen_lowpart_for_combine (GET_MODE (x), tem);
7316
7317    default:
7318      return x;
7319    }
7320
7321  /* Set LHS and RHS to the inner operands (A and B in the example
7322     above) and set OTHER to the common operand (C in the example).
7323     There is only one way to do this unless the inner operation is
7324     commutative.  */
7325  if (GET_RTX_CLASS (inner_code) == 'c'
7326      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7327    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7328  else if (GET_RTX_CLASS (inner_code) == 'c'
7329	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7330    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7331  else if (GET_RTX_CLASS (inner_code) == 'c'
7332	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7333    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7334  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7335    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7336  else
7337    return x;
7338
7339  /* Form the new inner operation, seeing if it simplifies first.  */
7340  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7341
7342  /* There is one exception to the general way of distributing:
7343     (a | b) ^ (a | c) -> (b ^ c) & (~a)  */
7344  if (code == XOR && inner_code == IOR)
7345    {
7346      inner_code = AND;
7347      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7348    }
7349
7350  /* We may be able to continue distributing the result, so call
7351     ourselves recursively on the inner operation before forming the
7352     outer operation, which we return.  */
7353  return gen_binary (inner_code, GET_MODE (x),
7354		     apply_distributive_law (tem), other);
7355}
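
/* For example, apply_distributive_law rewrites
	(plus (mult (reg:SI 60) (reg:SI 62)) (mult (reg:SI 61) (reg:SI 62)))
   as
	(mult (plus (reg:SI 60) (reg:SI 61)) (reg:SI 62)),
   trading two multiplications for one.  The register numbers are
   illustrative only.  */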
7356
7357/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7358   in MODE.
7359
7360   Return an equivalent form, if different from X.  Otherwise, return X.  If
7361   X is zero, we are to always construct the equivalent form.  */
7362
7363static rtx
7364simplify_and_const_int (x, mode, varop, constop)
7365     rtx x;
7366     enum machine_mode mode;
7367     rtx varop;
7368     unsigned HOST_WIDE_INT constop;
7369{
7370  unsigned HOST_WIDE_INT nonzero;
7371  int width = GET_MODE_BITSIZE (mode);
7372  int i;
7373
7374  /* Simplify VAROP knowing that we will be looking at only some of the
7375     bits in it.  */
7376  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7377
7378  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7379     CONST_INT, we are done.  */
7380  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7381    return varop;
7382
7383  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7384     a call to nonzero_bits, here we don't care about bits outside
7385     MODE.  */
7386
7387  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7388
7389  /* If this would be an entire word for the target, but is not for
7390     the host, then sign-extend on the host so that the number will look
7391     the same way on the host that it would on the target.
7392
7393     For example, when building a 64 bit alpha hosted 32 bit sparc
7394     targeted compiler, then we want the 32 bit unsigned value -1 to be
7395     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7396     The latter confuses the sparc backend.  */
7397
7398  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7399      && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7400    nonzero |= ((HOST_WIDE_INT) (-1) << width);
7401
7402  /* Turn off all bits in the constant that are known to already be zero.
7403     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7404     which is tested below.  */
7405
7406  constop &= nonzero;
7407
7408  /* If we don't have any bits left, return zero.  */
7409  if (constop == 0)
7410    return const0_rtx;
7411
7412  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7413     a power of two, we can replace this with an ASHIFT.  */
7414  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7415      && (i = exact_log2 (constop)) >= 0)
7416    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7417
7418  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7419     or XOR, then try to apply the distributive law.  This may eliminate
7420     operations if either branch can be simplified because of the AND.
7421     It may also make some cases more complex, but those cases probably
7422     won't match a pattern either with or without this.  */
7423
7424  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7425    return
7426      gen_lowpart_for_combine
7427	(mode,
7428	 apply_distributive_law
7429	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7430		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7431					      XEXP (varop, 0), constop),
7432		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7433					      XEXP (varop, 1), constop))));
7434
7435  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7436     if we already had one (just check for the simplest cases).  */
7437  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7438      && GET_MODE (XEXP (x, 0)) == mode
7439      && SUBREG_REG (XEXP (x, 0)) == varop)
7440    varop = XEXP (x, 0);
7441  else
7442    varop = gen_lowpart_for_combine (mode, varop);
7443
7444  /* If we can't make the SUBREG, try to return what we were given.  */
7445  if (GET_CODE (varop) == CLOBBER)
7446    return x ? x : varop;
7447
7448  /* If we are only masking insignificant bits, return VAROP.  */
7449  if (constop == nonzero)
7450    x = varop;
7451
7452  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
7453  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7454    x = gen_binary (AND, mode, varop, GEN_INT (constop));
7455
7456  else
7457    {
7458      if (GET_CODE (XEXP (x, 1)) != CONST_INT
7459	  || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7460	SUBST (XEXP (x, 1), GEN_INT (constop));
7461
7462      SUBST (XEXP (x, 0), varop);
7463    }
7464
7465  return x;
7466}
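
/* A worked instance of simplify_and_const_int: if MODE is SImode,
   VAROP is a register whose nonzero_bits are 0x000000ff, and CONSTOP
   is 0x0000ff00, then CONSTOP & NONZERO is zero and the whole AND
   folds to (const_int 0); if instead CONSTOP were 0x000000ff, it would
   equal NONZERO and VAROP would be returned with no AND at all.  */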
7467
7468/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7469   We don't let nonzero_bits recur into num_sign_bit_copies, because that
7470   is less useful.  We can't allow both, because that results in exponential
7471   run time recursion.  There is a nullstone testcase that triggered
7472   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
7473#define num_sign_bit_copies()
7474
7475/* Given an expression, X, compute which bits in X can be non-zero.
7476   We don't care about bits outside of those defined in MODE.
7477
7478   For most X this is simply GET_MODE_MASK (MODE), but if X is
7479   a shift, AND, or zero_extract, we can do better.  */
7480
7481static unsigned HOST_WIDE_INT
7482nonzero_bits (x, mode)
7483     rtx x;
7484     enum machine_mode mode;
7485{
7486  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7487  unsigned HOST_WIDE_INT inner_nz;
7488  enum rtx_code code;
7489  int mode_width = GET_MODE_BITSIZE (mode);
7490  rtx tem;
7491
7492  /* For floating-point values, assume all bits are needed.  */
7493  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7494    return nonzero;
7495
7496  /* If X is wider than MODE, use its mode instead.  */
7497  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7498    {
7499      mode = GET_MODE (x);
7500      nonzero = GET_MODE_MASK (mode);
7501      mode_width = GET_MODE_BITSIZE (mode);
7502    }
7503
7504  if (mode_width > HOST_BITS_PER_WIDE_INT)
7505    /* Our only callers in this case look for single bit values.  So
7506       just return the mode mask.  Those tests will then be false.  */
7507    return nonzero;
7508
7509#ifndef WORD_REGISTER_OPERATIONS
7510  /* If MODE is wider than X, but both are a single word for both the host
7511     and target machines, we can compute this from which bits of the
7512     object might be nonzero in its own mode, taking into account the fact
7513     that on many CISC machines, accessing an object in a wider mode
7514     causes the high-order bits to become undefined.  So they are
7515     not known to be zero.  */
7516
7517  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7518      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7519      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7520      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7521    {
7522      nonzero &= nonzero_bits (x, GET_MODE (x));
7523      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7524      return nonzero;
7525    }
7526#endif
7527
7528  code = GET_CODE (x);
7529  switch (code)
7530    {
7531    case REG:
7532#ifdef POINTERS_EXTEND_UNSIGNED
7533      /* If pointers extend unsigned and this is a pointer in Pmode, say that
7534	 all the bits above ptr_mode are known to be zero.  */
7535      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7536	  && REGNO_POINTER_FLAG (REGNO (x)))
7537	nonzero &= GET_MODE_MASK (ptr_mode);
7538#endif
7539
7540#ifdef STACK_BOUNDARY
7541      /* If this is the stack pointer, we may know something about its
7542	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
7543	 stack to be momentarily aligned only to that amount, so we pick
7544	 the least alignment.  */
7545
7546      /* We can't check for arg_pointer_rtx here, because it is not
7547	 guaranteed to have as much alignment as the stack pointer.
7548	 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7549	 alignment but the argument pointer has only 64 bit alignment.  */
7550
7551      if ((x == frame_pointer_rtx
7552	   || x == stack_pointer_rtx
7553	   || x == hard_frame_pointer_rtx
7554	   || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7555	       && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7556#ifdef STACK_BIAS
7557	  && !STACK_BIAS
7558#endif
7559	      )
7560	{
7561	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7562
7563#ifdef PUSH_ROUNDING
7564	  if (REGNO (x) == STACK_POINTER_REGNUM)
7565	    sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7566#endif
7567
7568	  /* We must return here, otherwise we may get a worse result from
7569	     one of the choices below.  There is nothing useful below as
7570	     far as the stack pointer is concerned.  */
7571	  return nonzero &= ~ (sp_alignment - 1);
7572	}
7573#endif
7574
7575      /* If X is a register whose nonzero bits value is current, use it.
7576	 Otherwise, if X is a register whose value we can find, use that
7577	 value.  Otherwise, use the previously-computed global nonzero bits
7578	 for this register.  */
7579
7580      if (reg_last_set_value[REGNO (x)] != 0
7581	  && reg_last_set_mode[REGNO (x)] == mode
7582	  && (REG_N_SETS (REGNO (x)) == 1
7583	      || reg_last_set_label[REGNO (x)] == label_tick)
7584	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7585	return reg_last_set_nonzero_bits[REGNO (x)];
7586
7587      tem = get_last_value (x);
7588
7589      if (tem)
7590	{
7591#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7592	  /* If X is narrower than MODE and TEM is a non-negative
7593	     constant that would appear negative in the mode of X,
7594	     sign-extend it for use in reg_nonzero_bits because some
7595	     machines (maybe most) will actually do the sign-extension
7596	     and this is the conservative approach.
7597
7598	     ??? For 2.5, try to tighten up the MD files in this regard
7599	     instead of this kludge.  */
7600
7601	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7602	      && GET_CODE (tem) == CONST_INT
7603	      && INTVAL (tem) > 0
7604	      && 0 != (INTVAL (tem)
7605		       & ((HOST_WIDE_INT) 1
7606			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7607	    tem = GEN_INT (INTVAL (tem)
7608			   | ((HOST_WIDE_INT) (-1)
7609			      << GET_MODE_BITSIZE (GET_MODE (x))));
7610#endif
7611	  return nonzero_bits (tem, mode);
7612	}
7613      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7614	return reg_nonzero_bits[REGNO (x)] & nonzero;
7615      else
7616	return nonzero;
7617
7618    case CONST_INT:
7619#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7620      /* If X is negative in MODE, sign-extend the value.  */
7621      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7622	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7623	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7624#endif
7625
7626      return INTVAL (x);
7627
7628    case MEM:
7629#ifdef LOAD_EXTEND_OP
7630      /* On many, if not most, RISC machines, reading a byte from memory
7631	 zeros the rest of the register.  Noticing that fact saves a lot
7632	 of extra zero-extends.  */
7633      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7634	nonzero &= GET_MODE_MASK (GET_MODE (x));
7635#endif
7636      break;
7637
7638    case EQ:  case NE:
7639    case GT:  case GTU:
7640    case LT:  case LTU:
7641    case GE:  case GEU:
7642    case LE:  case LEU:
7643
7644      /* If this produces an integer result, we know which bits are set.
7645	 Code here used to clear bits outside the mode of X, but that is
7646	 now done above.  */
7647
7648      if (GET_MODE_CLASS (mode) == MODE_INT
7649	  && mode_width <= HOST_BITS_PER_WIDE_INT)
7650	nonzero = STORE_FLAG_VALUE;
7651      break;
7652
7653    case NEG:
7654#if 0
7655      /* Disabled to avoid exponential mutual recursion between nonzero_bits
7656	 and num_sign_bit_copies.  */
7657      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7658	  == GET_MODE_BITSIZE (GET_MODE (x)))
7659	nonzero = 1;
7660#endif
7661
7662      if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
7663	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
7664      break;
7665
7666    case ABS:
7667#if 0
7668      /* Disabled to avoid exponential mutual recursion between nonzero_bits
7669	 and num_sign_bit_copies.  */
7670      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7671	  == GET_MODE_BITSIZE (GET_MODE (x)))
7672	nonzero = 1;
7673#endif
7674      break;
7675
7676    case TRUNCATE:
7677      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7678      break;
7679
7680    case ZERO_EXTEND:
7681      nonzero &= nonzero_bits (XEXP (x, 0), mode);
7682      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7683	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7684      break;
7685
7686    case SIGN_EXTEND:
7687      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7688	 Otherwise, show that all the bits in the outer mode but not in the
7689	 inner mode may be non-zero.  */
7690      inner_nz = nonzero_bits (XEXP (x, 0), mode);
7691      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7692	{
7693	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7694	  if (inner_nz
7695	      & (((HOST_WIDE_INT) 1
7696		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
7697	    inner_nz |= (GET_MODE_MASK (mode)
7698			  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7699	}
7700
7701      nonzero &= inner_nz;
7702      break;
7703
7704    case AND:
7705      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7706		  & nonzero_bits (XEXP (x, 1), mode));
7707      break;
7708
7709    case XOR:   case IOR:
7710    case UMIN:  case UMAX:  case SMIN:  case SMAX:
7711      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7712		  | nonzero_bits (XEXP (x, 1), mode));
7713      break;
7714
7715    case PLUS:  case MINUS:
7716    case MULT:
7717    case DIV:   case UDIV:
7718    case MOD:   case UMOD:
7719      /* We can apply the rules of arithmetic to compute the number of
7720	 high- and low-order zero bits of these operations.  We start by
7721	 computing the width (position of the highest-order non-zero bit)
7722	 and the number of low-order zero bits for each value.  */
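      /* For example, if NZ0 == 0x1c (width 5, 2 low-order zero bits) and
	 NZ1 == 0x06 (width 3, 1 low-order zero bit), then for MULT the
	 result width is 5 + 3 == 8 and the result has 2 + 1 == 3 low-order
	 zero bits, so the product's nonzero bits all lie within 0xf8.  */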
7723      {
7724	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7725	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7726	int width0 = floor_log2 (nz0) + 1;
7727	int width1 = floor_log2 (nz1) + 1;
7728	int low0 = floor_log2 (nz0 & -nz0);
7729	int low1 = floor_log2 (nz1 & -nz1);
7730	HOST_WIDE_INT op0_maybe_minusp
7731	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7732	HOST_WIDE_INT op1_maybe_minusp
7733	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7734	int result_width = mode_width;
7735	int result_low = 0;
7736
7737	switch (code)
7738	  {
7739	  case PLUS:
7740#ifdef STACK_BIAS
7741	    if (STACK_BIAS
7742	        && (XEXP (x, 0) == stack_pointer_rtx
7743	            || XEXP (x, 0) == frame_pointer_rtx)
7744	        && GET_CODE (XEXP (x, 1)) == CONST_INT)
7745	      {
7746		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7747
7748	        nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
7749	        nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
7750	        width0 = floor_log2 (nz0) + 1;
7751	        width1 = floor_log2 (nz1) + 1;
7752	        low0 = floor_log2 (nz0 & -nz0);
7753	        low1 = floor_log2 (nz1 & -nz1);
7754	      }
7755#endif
7756	    result_width = MAX (width0, width1) + 1;
7757	    result_low = MIN (low0, low1);
7758	    break;
7759	  case MINUS:
7760	    result_low = MIN (low0, low1);
7761	    break;
7762	  case MULT:
7763	    result_width = width0 + width1;
7764	    result_low = low0 + low1;
7765	    break;
7766	  case DIV:
7767	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7768	      result_width = width0;
7769	    break;
7770	  case UDIV:
7771	    result_width = width0;
7772	    break;
7773	  case MOD:
7774	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7775	      result_width = MIN (width0, width1);
7776	    result_low = MIN (low0, low1);
7777	    break;
7778	  case UMOD:
7779	    result_width = MIN (width0, width1);
7780	    result_low = MIN (low0, low1);
7781	    break;
7782	  default:
7783	    abort ();
7784	  }
7785
7786	if (result_width < mode_width)
7787	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7788
7789	if (result_low > 0)
7790	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7791      }
7792      break;
7793
7794    case ZERO_EXTRACT:
7795      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7796	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7797	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7798      break;
7799
7800    case SUBREG:
7801      /* If this is a SUBREG formed for a promoted variable that has
7802	 been zero-extended, we know that at least the high-order bits
7803	 are zero, though others might be too.  */
7804
7805      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7806	nonzero = (GET_MODE_MASK (GET_MODE (x))
7807		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7808
7809      /* If the inner mode is a single word for both the host and target
7810	 machines, we can compute this from which bits of the inner
7811	 object might be nonzero.  */
7812      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7813	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7814	      <= HOST_BITS_PER_WIDE_INT))
7815	{
7816	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7817
7818#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
7819	  /* If this is a typical RISC machine, we only have to worry
7820	     about the way loads are extended.  */
7821	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
7822	      ? (nonzero
7823		 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
7824	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
7825#endif
7826	    {
7827	      /* On many CISC machines, accessing an object in a wider mode
7828		 causes the high-order bits to become undefined.  So they are
7829		 not known to be zero.  */
7830	      if (GET_MODE_SIZE (GET_MODE (x))
7831		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7832		nonzero |= (GET_MODE_MASK (GET_MODE (x))
7833			    & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7834	    }
7835	}
7836      break;
7837
7838    case ASHIFTRT:
7839    case LSHIFTRT:
7840    case ASHIFT:
7841    case ROTATE:
7842      /* The nonzero bits are in two classes: any bits within MODE
7843	 that aren't in GET_MODE (x) are always significant.  The rest of the
7844	 nonzero bits are those that are significant in the operand of
7845	 the shift when shifted the appropriate number of bits.  This
7846	 shows that high-order bits are cleared by the right shift and
7847	 low-order bits by left shifts.  */
7848      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7849	  && INTVAL (XEXP (x, 1)) >= 0
7850	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7851	{
7852	  enum machine_mode inner_mode = GET_MODE (x);
7853	  int width = GET_MODE_BITSIZE (inner_mode);
7854	  int count = INTVAL (XEXP (x, 1));
7855	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7856	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7857	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7858	  unsigned HOST_WIDE_INT outer = 0;
7859
7860	  if (mode_width > width)
7861	    outer = (op_nonzero & nonzero & ~ mode_mask);
7862
7863	  if (code == LSHIFTRT)
7864	    inner >>= count;
7865	  else if (code == ASHIFTRT)
7866	    {
7867	      inner >>= count;
7868
7869	      /* If the sign bit may have been nonzero before the shift, we
7870		 need to mark all the places it could have been copied to
7871		 by the shift as possibly nonzero.  */
7872	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7873		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7874	    }
7875	  else if (code == ASHIFT)
7876	    inner <<= count;
7877	  else
7878	    inner = ((inner << (count % width)
7879		      | (inner >> (width - (count % width)))) & mode_mask);
7880
7881	  nonzero &= (outer | inner);
7882	}
7883      break;
7884
7885    case FFS:
7886      /* This is at most the number of bits in the mode.  */
7887      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7888      break;
7889
7890    case IF_THEN_ELSE:
7891      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7892		  | nonzero_bits (XEXP (x, 2), mode));
7893      break;
7894
7895    default:
7896      break;
7897    }
7898
7899  return nonzero;
7900}
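
/* As an illustration of nonzero_bits, consider in SImode
	(ashift:SI (and:SI (reg:SI 60) (const_int 15)) (const_int 4)):
   the AND leaves at most the low four bits set and the shift moves them
   up, so only bits 4..7 can be nonzero and the value 0xf0 is returned.
   The register number is illustrative only.  */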
7901
7902/* See the macro definition above.  */
7903#undef num_sign_bit_copies
7904
7905/* Return the number of bits at the high-order end of X that are known to
7906   be equal to the sign bit.  X will be used in mode MODE; if MODE is
7907   VOIDmode, X will be used in its own mode.  The returned value will always
7908   be between 1 and the number of bits in MODE.  */
7909
7910static int
7911num_sign_bit_copies (x, mode)
7912     rtx x;
7913     enum machine_mode mode;
7914{
7915  enum rtx_code code = GET_CODE (x);
7916  int bitwidth;
7917  int num0, num1, result;
7918  unsigned HOST_WIDE_INT nonzero;
7919  rtx tem;
7920
7921  /* If we weren't given a mode, use the mode of X.  If the mode is still
7922     VOIDmode, we don't know anything.  Likewise if one of the modes is
7923     floating-point.  */
7924
7925  if (mode == VOIDmode)
7926    mode = GET_MODE (x);
7927
7928  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7929    return 1;
7930
7931  bitwidth = GET_MODE_BITSIZE (mode);
7932
7933  /* For a smaller object, just ignore the high bits.  */
7934  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7935    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7936		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7937
7938  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7939    {
7940#ifndef WORD_REGISTER_OPERATIONS
7941      /* If this machine does not do all register operations on the entire
7942	 register and MODE is wider than the mode of X, we can say nothing
7943	 at all about the high-order bits.  */
7944      return 1;
7945#else
7946      /* Likewise on machines that do, if the mode of the object is smaller
7947	 than a word and loads of that size don't sign extend, we can say
7948	 nothing about the high order bits.  */
7949      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7950#ifdef LOAD_EXTEND_OP
7951	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7952#endif
7953	  )
7954	return 1;
7955#endif
7956    }
7957
7958  switch (code)
7959    {
7960    case REG:
7961
7962#ifdef POINTERS_EXTEND_UNSIGNED
7963      /* If pointers extend signed and this is a pointer in Pmode, say that
7964	 all the bits above ptr_mode are known to be sign bit copies.  */
7965      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7966	  && REGNO_POINTER_FLAG (REGNO (x)))
7967	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7968#endif
7969
7970      if (reg_last_set_value[REGNO (x)] != 0
7971	  && reg_last_set_mode[REGNO (x)] == mode
7972	  && (REG_N_SETS (REGNO (x)) == 1
7973	      || reg_last_set_label[REGNO (x)] == label_tick)
7974	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7975	return reg_last_set_sign_bit_copies[REGNO (x)];
7976
7977      tem = get_last_value (x);
7978      if (tem != 0)
7979	return num_sign_bit_copies (tem, mode);
7980
7981      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7982	return reg_sign_bit_copies[REGNO (x)];
7983      break;
7984
7985    case MEM:
7986#ifdef LOAD_EXTEND_OP
7987      /* Some RISC machines sign-extend all loads of smaller than a word.  */
7988      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7989	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7990#endif
7991      break;
7992
7993    case CONST_INT:
7994      /* If the constant is negative, take its 1's complement and remask.
7995	 Then see how many zero bits we have.  */
7996      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7997      if (bitwidth <= HOST_BITS_PER_WIDE_INT
7998	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7999	nonzero = (~ nonzero) & GET_MODE_MASK (mode);
8000
8001      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8002
8003    case SUBREG:
8004      /* If this is a SUBREG for a promoted object that is sign-extended
8005	 and we are looking at it in a wider mode, at least the
8006	 high-order bits are known to be sign bit copies.  */
8007
8008      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8009	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8010		    num_sign_bit_copies (SUBREG_REG (x), mode));
8011
8012      /* For a smaller object, just ignore the high bits.  */
8013      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8014	{
8015	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8016	  return MAX (1, (num0
8017			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8018			     - bitwidth)));
8019	}
8020
8021#ifdef WORD_REGISTER_OPERATIONS
8022#ifdef LOAD_EXTEND_OP
8023      /* For paradoxical SUBREGs on machines where all register operations
8024	 affect the entire register, just look inside.  Note that we are
8025	 passing MODE to the recursive call, so the number of sign bit copies
8026	 will remain relative to that mode, not the inner mode.  */
8027
8028      /* This works only if loads sign extend.  Otherwise, if we get a
8029	 reload for the inner part, it may be loaded from the stack, and
8030	 then we lose all sign bit copies that existed before the store
8031	 to the stack.  */
8032
8033      if ((GET_MODE_SIZE (GET_MODE (x))
8034	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8035	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
8036	return num_sign_bit_copies (SUBREG_REG (x), mode);
8037#endif
8038#endif
8039      break;
8040
8041    case SIGN_EXTRACT:
8042      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8043	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
8044      break;
8045
8046    case SIGN_EXTEND:
8047      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8048	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8049
8050    case TRUNCATE:
8051      /* For a smaller object, just ignore the high bits.  */
8052      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8053      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8054			      - bitwidth)));
8055
8056    case NOT:
8057      return num_sign_bit_copies (XEXP (x, 0), mode);
8058
8059    case ROTATE:       case ROTATERT:
8060      /* If we are rotating left by a number of bits less than the number
8061	 of sign bit copies, we can just subtract that amount from the
8062	 number.  */
8063      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8064	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
8065	{
8066	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8067	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8068				 : bitwidth - INTVAL (XEXP (x, 1))));
8069	}
8070      break;
8071
8072    case NEG:
8073      /* In general, this subtracts one sign bit copy.  But if the value
8074	 is known to be positive, the number of sign bit copies is the
8075	 same as that of the input.  Finally, if the input has just one bit
8076	 that might be nonzero, all the bits are copies of the sign bit.  */
8077      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8078      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8079	return num0 > 1 ? num0 - 1 : 1;
8080
8081      nonzero = nonzero_bits (XEXP (x, 0), mode);
8082      if (nonzero == 1)
8083	return bitwidth;
8084
8085      if (num0 > 1
8086	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8087	num0--;
8088
8089      return num0;
8090
8091    case IOR:   case AND:   case XOR:
8092    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8093      /* Logical operations will preserve the number of sign-bit copies.
8094	 MIN and MAX operations always return one of the operands.  */
8095      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8096      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8097      return MIN (num0, num1);
8098
8099    case PLUS:  case MINUS:
8100      /* For addition and subtraction, we can have a 1-bit carry.  However,
8101	 if we are subtracting 1 from a positive number, there will not
8102	 be such a carry.  Furthermore, if the positive number is known to
8103	 be 0 or 1, we know the result is either -1 or 0.  */
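      /* For example, if X is known to be 0 or 1, (plus X (const_int -1))
	 is either -1 or 0, so every bit of the result is a copy of the
	 sign bit and we return BITWIDTH below.  */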
8104
8105      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8106	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8107	{
8108	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8109	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8110	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8111		    : bitwidth - floor_log2 (nonzero) - 1);
8112	}
8113
8114      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8115      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8116      return MAX (1, MIN (num0, num1) - 1);
8117
8118    case MULT:
8119      /* The number of bits of the product is the sum of the number of
8120	 bits of both terms.  However, unless one of the terms is known
8121	 to be positive, we must allow for an additional bit since negating
8122	 a negative number can remove one sign bit copy.  */
8123
8124      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8125      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8126
8127      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8128      if (result > 0
8129	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8130	      || (((nonzero_bits (XEXP (x, 0), mode)
8131		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8132		  && ((nonzero_bits (XEXP (x, 1), mode)
8133		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8134	result--;
8135
8136      return MAX (1, result);
8137
8138    case UDIV:
8139      /* The result must be <= the first operand.  If the first operand
8140         has the high bit set, we know nothing about the number of sign
8141         bit copies.  */
8142      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8143	return 1;
8144      else if ((nonzero_bits (XEXP (x, 0), mode)
8145		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8146	return 1;
8147      else
8148	return num_sign_bit_copies (XEXP (x, 0), mode);
8149
8150    case UMOD:
8151      /* The result must be <= the second operand.  */
8152      return num_sign_bit_copies (XEXP (x, 1), mode);
8153
8154    case DIV:
8155      /* Similar to unsigned division, except that we have to worry about
8156	 the case where the divisor is negative, in which case we have
8157	 to add 1.  */
8158      result = num_sign_bit_copies (XEXP (x, 0), mode);
8159      if (result > 1
8160	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8161	      || (nonzero_bits (XEXP (x, 1), mode)
8162		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8163	result--;
8164
8165      return result;
8166
8167    case MOD:
8168      result = num_sign_bit_copies (XEXP (x, 1), mode);
8169      if (result > 1
8170	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8171	      || (nonzero_bits (XEXP (x, 1), mode)
8172		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8173	result--;
8174
8175      return result;
8176
8177    case ASHIFTRT:
8178      /* Shifts by a constant add to the number of bits equal to the
8179	 sign bit.  */
8180      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8181      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8182	  && INTVAL (XEXP (x, 1)) > 0)
8183	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8184
8185      return num0;
8186
8187    case ASHIFT:
8188      /* Left shifts destroy copies.  */
8189      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8190	  || INTVAL (XEXP (x, 1)) < 0
8191	  || INTVAL (XEXP (x, 1)) >= bitwidth)
8192	return 1;
8193
8194      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8195      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8196
8197    case IF_THEN_ELSE:
8198      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8199      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8200      return MIN (num0, num1);
8201
8202    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8203    case GEU: case GTU: case LEU: case LTU:
8204      if (STORE_FLAG_VALUE == -1)
8205	return bitwidth;
8206      break;
8207
8208    default:
8209      break;
8210    }
8211
8212  /* If we haven't been able to figure it out by one of the above rules,
8213     see if some of the high-order bits are known to be zero.  If so,
8214     count those bits and return one less than that amount.  If we can't
8215     safely compute the mask for this mode, always return BITWIDTH.  */
8216
8217  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8218    return 1;
8219
8220  nonzero = nonzero_bits (x, mode);
8221  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8222	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8223}
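
/* Two small checks on num_sign_bit_copies: in SImode, (const_int -4)
   is 0xfffffffc, whose complement under the mode mask is 3, giving
   32 - floor_log2 (3) - 1 == 30 sign bit copies (bits 31..2 are all
   ones); and (ashiftrt:SI X (const_int 31)) is always 0 or -1, so it
   reports the full 32 copies.  */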
8224
8225/* Return the number of "extended" bits there are in X, when interpreted
8226   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8227   unsigned quantities, this is the number of high-order zero bits.
8228   For signed quantities, this is the number of copies of the sign bit
8229   minus 1.  In both cases, this function returns the number of "spare"
8230   bits.  For example, if two quantities for which this function returns
8231   at least 1 are added, the addition is known not to overflow.
8232
8233   This function will always return 0 unless called during combine, which
8234   implies that it must be called from a define_split.  */
8235
8236int
8237extended_count (x, mode, unsignedp)
8238     rtx x;
8239     enum machine_mode mode;
8240     int unsignedp;
8241{
8242  if (nonzero_sign_valid == 0)
8243    return 0;
8244
8245  return (unsignedp
8246	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8247	     && (GET_MODE_BITSIZE (mode) - 1
8248		 - floor_log2 (nonzero_bits (x, mode))))
8249	  : num_sign_bit_copies (x, mode) - 1);
8250}
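
/* A machine description might use this in a define_split condition,
   e.g. requiring extended_count (operands[1], SImode, 1) >= 16 before
   splitting a 32 bit unsigned operation into halfword pieces.  This is
   a hypothetical use, not a reference to any particular port.  */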
8251
8252/* This function is called from `simplify_shift_const' to merge two
8253   outer operations.  Specifically, we have already found that we need
8254   to perform operation *POP0 with constant *PCONST0 at the outermost
8255   position.  We would now like to also perform OP1 with constant CONST1
8256   (with *POP0 being done last).
8257
8258   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8259   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8260   complement the innermost operand, otherwise it is unchanged.
8261
8262   MODE is the mode in which the operation will be done.  No bits outside
8263   the width of this mode matter.  It is assumed that the width of this mode
8264   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8265
8266   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8267   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8268   result is simply *PCONST0.
8269
8270   If the resulting operation cannot be expressed as one operation, we
8271   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
8272
8273static int
8274merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8275     enum rtx_code *pop0;
8276     HOST_WIDE_INT *pconst0;
8277     enum rtx_code op1;
8278     HOST_WIDE_INT const1;
8279     enum machine_mode mode;
8280     int *pcomp_p;
8281{
8282  enum rtx_code op0 = *pop0;
8283  HOST_WIDE_INT const0 = *pconst0;
8284  int width = GET_MODE_BITSIZE (mode);
8285
8286  const0 &= GET_MODE_MASK (mode);
8287  const1 &= GET_MODE_MASK (mode);
8288
8289  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8290  if (op0 == AND)
8291    const1 &= const0;
8292
8293  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
8294     if OP0 is SET.  */
8295
8296  if (op1 == NIL || op0 == SET)
8297    return 1;
8298
8299  else if (op0 == NIL)
8300    op0 = op1, const0 = const1;
8301
8302  else if (op0 == op1)
8303    {
8304      switch (op0)
8305	{
8306	case AND:
8307	  const0 &= const1;
8308	  break;
8309	case IOR:
8310	  const0 |= const1;
8311	  break;
8312	case XOR:
8313	  const0 ^= const1;
8314	  break;
8315	case PLUS:
8316	  const0 += const1;
8317	  break;
8318	case NEG:
8319	  op0 = NIL;
8320	  break;
8321	default:
8322	  break;
8323	}
8324    }
8325
8326  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8327  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8328    return 0;
8329
8330  /* If the two constants aren't the same, we can't do anything.  The
8331     remaining six cases can all be done.  */
8332  else if (const0 != const1)
8333    return 0;
8334
8335  else
8336    switch (op0)
8337      {
8338      case IOR:
8339	if (op1 == AND)
8340	  /* (a & b) | b == b */
8341	  op0 = SET;
8342	else /* op1 == XOR */
8343	  /* (a ^ b) | b == a | b */
8344	  {;}
8345	break;
8346
8347      case XOR:
8348	if (op1 == AND)
8349	  /* (a & b) ^ b == (~a) & b */
8350	  op0 = AND, *pcomp_p = 1;
8351	else /* op1 == IOR */
8352	  /* (a | b) ^ b == a & ~b */
8353	  op0 = AND, *pconst0 = ~ const0;
8354	break;
8355
8356      case AND:
8357	if (op1 == IOR)
8358	  /* (a | b) & b == b */
8359	  op0 = SET;
8360	else /* op1 == XOR */
8361	  /* (a ^ b) & b == (~a) & b */
8362	  *pcomp_p = 1;
8363	break;
8364      default:
8365	break;
8366      }
8367
8368  /* Check for NO-OP cases.  */
8369  const0 &= GET_MODE_MASK (mode);
8370  if (const0 == 0
8371      && (op0 == IOR || op0 == XOR || op0 == PLUS))
8372    op0 = NIL;
8373  else if (const0 == 0 && op0 == AND)
8374    op0 = SET;
8375  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8376	   && op0 == AND)
8377    op0 = NIL;
8378
8379  /* If this would be an entire word for the target, but is not for
8380     the host, then sign-extend on the host so that the number will look
8381     the same way on the host that it would on the target.
8382
8383     For example, when building a 64 bit alpha hosted 32 bit sparc
8384     targeted compiler, then we want the 32 bit unsigned value -1 to be
8385     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8386     The latter confuses the sparc backend.  */
8387
8388  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8389      && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8390    const0 |= ((HOST_WIDE_INT) (-1) << width);
8391
8392  *pop0 = op0;
8393  *pconst0 = const0;
8394
8395  return 1;
8396}
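
/* A worked instance of merge_outer_ops: if *POP0 is XOR with *PCONST0
   == 0x80 and we also need OP1 == IOR with CONST1 == 0x80 done first,
   the identities above collapse the pair into a single AND with ~0x80,
   since (a | b) ^ b == a & ~b.  */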
8397
8398/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8399   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
8400   that we started with.
8401
8402   The shift is normally computed in the widest mode we find in VAROP, as
8403   long as it isn't a different number of words than RESULT_MODE.  Exceptions
8404   are right shifts and ROTATE, which are always done in their original mode.  */
8405
8406static rtx
8407simplify_shift_const (x, code, result_mode, varop, count)
8408     rtx x;
8409     enum rtx_code code;
8410     enum machine_mode result_mode;
8411     rtx varop;
8412     int count;
8413{
8414  enum rtx_code orig_code = code;
8415  int orig_count = count;
8416  enum machine_mode mode = result_mode;
8417  enum machine_mode shift_mode, tmode;
8418  int mode_words
8419    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8420  /* We form (outer_op (code varop count) (outer_const)).  */
8421  enum rtx_code outer_op = NIL;
8422  HOST_WIDE_INT outer_const = 0;
8423  rtx const_rtx;
8424  int complement_p = 0;
8425  rtx new;
8426
8427  /* If we were given an invalid count, don't do anything except exactly
8428     what was requested.  */
8429
8430  if (count < 0 || count > GET_MODE_BITSIZE (mode))
8431    {
8432      if (x)
8433	return x;
8434
8435      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
8436    }
8437
8438  /* Unless one of the branches of the `if' in this loop does a `continue',
8439     we will `break' the loop after the `if'.  */
8440
8441  while (count != 0)
8442    {
8443      /* If we have an operand of (clobber (const_int 0)), just return that
8444	 value.  */
8445      if (GET_CODE (varop) == CLOBBER)
8446	return varop;
8447
8448      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8449	 here would cause an infinite loop.  */
8450      if (complement_p)
8451	break;
8452
8453      /* Convert ROTATERT to ROTATE.  */
8454      if (code == ROTATERT)
8455	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8456
8457      /* We need to determine what mode we will do the shift in.  If the
8458	 shift is a right shift or a ROTATE, we must always do it in the mode
8459	 it was originally done in.  Otherwise, we can do it in MODE, the
8460	 widest mode encountered.  */
8461      shift_mode
8462	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8463	   ? result_mode : mode);
8464
8465      /* Handle cases where the count is greater than the size of the mode
8466	 minus 1.  For ASHIFT, use the size minus one as the count (this can
8467	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8468	 take the count modulo the size.  For other shifts, the result is
8469	 zero.
8470
8471	 Since these shifts are being produced by the compiler by combining
8472	 multiple operations, each of which are defined, we know what the
8473	 result is supposed to be.  */
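      /* For example, in SImode an ASHIFTRT by 34 acts like a shift by 31
	 (only copies of the sign bit remain), a ROTATE by 34 is a ROTATE
	 by 2, and an LSHIFTRT or ASHIFT by 34 leaves nothing but zeros.  */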
8474
8475      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8476	{
8477	  if (code == ASHIFTRT)
8478	    count = GET_MODE_BITSIZE (shift_mode) - 1;
8479	  else if (code == ROTATE || code == ROTATERT)
8480	    count %= GET_MODE_BITSIZE (shift_mode);
8481	  else
8482	    {
8483	      /* We can't simply return zero because there may be an
8484		 outer op.  */
8485	      varop = const0_rtx;
8486	      count = 0;
8487	      break;
8488	    }
8489	}
8490
8491      /* Negative counts are invalid and should not have been made (a
8492	 programmer-specified negative count should have been handled
8493	 above).  */
8494      else if (count < 0)
8495	abort ();
8496
8497      /* An arithmetic right shift of a quantity known to be -1 or 0
8498	 is a no-op.  */
8499      if (code == ASHIFTRT
8500	  && (num_sign_bit_copies (varop, shift_mode)
8501	      == GET_MODE_BITSIZE (shift_mode)))
8502	{
8503	  count = 0;
8504	  break;
8505	}
8506
8507      /* If we are doing an arithmetic right shift and discarding all but
8508	 the sign bit copies, this is equivalent to doing a shift by the
8509	 bitsize minus one.  Convert it into that shift because it will often
8510	 allow other simplifications.  */
8511
8512      if (code == ASHIFTRT
8513	  && (count + num_sign_bit_copies (varop, shift_mode)
8514	      >= GET_MODE_BITSIZE (shift_mode)))
8515	count = GET_MODE_BITSIZE (shift_mode) - 1;
8516
8517      /* We simplify the tests below and elsewhere by converting
8518	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8519	 `make_compound_operation' will convert it to an ASHIFTRT for
8520	 those machines (such as Vax) that don't have an LSHIFTRT.  */
8521      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8522	  && code == ASHIFTRT
8523	  && ((nonzero_bits (varop, shift_mode)
8524	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8525	      == 0))
8526	code = LSHIFTRT;
8527
8528      switch (GET_CODE (varop))
8529	{
8530	case SIGN_EXTEND:
8531	case ZERO_EXTEND:
8532	case SIGN_EXTRACT:
8533	case ZERO_EXTRACT:
8534	  new = expand_compound_operation (varop);
8535	  if (new != varop)
8536	    {
8537	      varop = new;
8538	      continue;
8539	    }
8540	  break;
8541
8542	case MEM:
8543	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8544	     minus the width of a smaller mode, we can do this with a
8545	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
8546	  if ((code == ASHIFTRT || code == LSHIFTRT)
8547	      && ! mode_dependent_address_p (XEXP (varop, 0))
8548	      && ! MEM_VOLATILE_P (varop)
8549	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8550					 MODE_INT, 1)) != BLKmode)
8551	    {
8552	      if (BYTES_BIG_ENDIAN)
8553		new = gen_rtx_MEM (tmode, XEXP (varop, 0));
8554	      else
8555		new = gen_rtx_MEM (tmode,
8556				   plus_constant (XEXP (varop, 0),
8557						  count / BITS_PER_UNIT));
8558	      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8559	      MEM_COPY_ATTRIBUTES (new, varop);
8560	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8561				       : ZERO_EXTEND, mode, new);
8562	      count = 0;
8563	      continue;
8564	    }
8565	  break;
8566
8567	case USE:
8568	  /* Similar to the case above, except that we can only do this if
8569	     the resulting mode is the same as that of the underlying
8570	     MEM and adjust the address depending on the *bits* endianness
8571	     because of the way that bit-field extract insns are defined.  */
8572	  if ((code == ASHIFTRT || code == LSHIFTRT)
8573	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8574					 MODE_INT, 1)) != BLKmode
8575	      && tmode == GET_MODE (XEXP (varop, 0)))
8576	    {
8577	      if (BITS_BIG_ENDIAN)
8578		new = XEXP (varop, 0);
8579	      else
8580		{
8581		  new = copy_rtx (XEXP (varop, 0));
8582		  SUBST (XEXP (new, 0),
8583			 plus_constant (XEXP (new, 0),
8584					count / BITS_PER_UNIT));
8585		}
8586
8587	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8588				       : ZERO_EXTEND, mode, new);
8589	      count = 0;
8590	      continue;
8591	    }
8592	  break;
8593
8594	case SUBREG:
8595	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
8596	     the same number of words as what we've seen so far.  Then store
8597	     the widest mode in MODE.  */
8598	  if (subreg_lowpart_p (varop)
8599	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8600		  > GET_MODE_SIZE (GET_MODE (varop)))
8601	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8602		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8603		  == mode_words))
8604	    {
8605	      varop = SUBREG_REG (varop);
8606	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8607		mode = GET_MODE (varop);
8608	      continue;
8609	    }
8610	  break;
8611
8612	case MULT:
8613	  /* Some machines use MULT instead of ASHIFT because MULT
8614	     is cheaper.  But it is still better on those machines to
8615	     merge two shifts into one.  */
8616	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8617	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8618	    {
8619	      varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
8620				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8621	      continue;
8622	    }
8623	  break;
8624
8625	case UDIV:
8626	  /* Similar, for when divides are cheaper.  */
8627	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8628	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8629	    {
8630	      varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
8631				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8632	      continue;
8633	    }
8634	  break;
8635
8636	case ASHIFTRT:
8637	  /* If we are extracting just the sign bit of an arithmetic right
8638	     shift, that shift is not needed.  */
8639	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8640	    {
8641	      varop = XEXP (varop, 0);
8642	      continue;
8643	    }
8644
8645	  /* ... fall through ...  */
8646
8647	case LSHIFTRT:
8648	case ASHIFT:
8649	case ROTATE:
8650	  /* Here we have two nested shifts.  The result is usually the
8651	     AND of a new shift with a mask.  We compute the result below.  */
8652	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8653	      && INTVAL (XEXP (varop, 1)) >= 0
8654	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8655	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8656	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8657	    {
8658	      enum rtx_code first_code = GET_CODE (varop);
8659	      int first_count = INTVAL (XEXP (varop, 1));
8660	      unsigned HOST_WIDE_INT mask;
8661	      rtx mask_rtx;
8662
8663	      /* We have one common special case.  We can't do any merging if
8664		 the inner code is an ASHIFTRT of a smaller mode.  However, if
8665		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8666		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8667		 we can convert it to
8668		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8669		 This simplifies certain SIGN_EXTEND operations.  */
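	      /* Concretely, with M1 == SImode, M2 == QImode, C1 == 2 and
		 C2 == 24, the mask C3 is 0xfffffffc: the mode mask with
		 the low-order C1 bits cleared.  */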
8670	      if (code == ASHIFT && first_code == ASHIFTRT
8671		  && (GET_MODE_BITSIZE (result_mode)
8672		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8673		{
8674		  /* C3 has the low-order C1 bits zero.  */
8675
8676		  mask = (GET_MODE_MASK (mode)
8677			  & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
8678
8679		  varop = simplify_and_const_int (NULL_RTX, result_mode,
8680						  XEXP (varop, 0), mask);
8681		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8682						varop, count);
8683		  count = first_count;
8684		  code = ASHIFTRT;
8685		  continue;
8686		}
8687
8688	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8689		 than C1 high-order bits equal to the sign bit, we can convert
8690		 this to either an ASHIFT or an ASHIFTRT depending on the
8691		 two counts.
8692
8693		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
8694
8695	      if (code == ASHIFTRT && first_code == ASHIFT
8696		  && GET_MODE (varop) == shift_mode
8697		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8698		      > first_count))
8699		{
8700		  count -= first_count;
8701		  if (count < 0)
8702		    count = - count, code = ASHIFT;
8703		  varop = XEXP (varop, 0);
8704		  continue;
8705		}
8706
8707	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
8708		 we can only do this if FIRST_CODE is also ASHIFTRT.
8709
8710		 We can't do the case when CODE is ROTATE and FIRST_CODE is
8711		 ASHIFTRT.
8712
8713		 If the mode of this shift is not the mode of the outer shift,
8714		 we can't do this if either shift is a right shift or ROTATE.
8715
8716		 Finally, we can't do any of these if the mode is too wide
8717		 unless the codes are the same.
8718
8719		 Handle the case where the shift codes are the same
8720		 first.  */
8721
8722	      if (code == first_code)
8723		{
8724		  if (GET_MODE (varop) != result_mode
8725		      && (code == ASHIFTRT || code == LSHIFTRT
8726			  || code == ROTATE))
8727		    break;
8728
8729		  count += first_count;
8730		  varop = XEXP (varop, 0);
8731		  continue;
8732		}
8733
8734	      if (code == ASHIFTRT
8735		  || (code == ROTATE && first_code == ASHIFTRT)
8736		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8737		  || (GET_MODE (varop) != result_mode
8738		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
8739			  || first_code == ROTATE
8740			  || code == ROTATE)))
8741		break;
8742
8743	      /* To compute the mask to apply after the shift, shift the
8744		 nonzero bits of the inner shift the same way the
8745		 outer shift will.  */
8746
8747	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8748
8749	      mask_rtx
8750		= simplify_binary_operation (code, result_mode, mask_rtx,
8751					     GEN_INT (count));
8752
8753	      /* Give up if we can't compute an outer operation to use.  */
8754	      if (mask_rtx == 0
8755		  || GET_CODE (mask_rtx) != CONST_INT
8756		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
8757					INTVAL (mask_rtx),
8758					result_mode, &complement_p))
8759		break;
8760
8761	      /* If the shifts are in the same direction, we add the
8762		 counts.  Otherwise, we subtract them.  */
8763	      if ((code == ASHIFTRT || code == LSHIFTRT)
8764		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8765		count += first_count;
8766	      else
8767		count -= first_count;
8768
8769	      /* If COUNT is positive, the new shift is usually CODE,
8770		 except for the two exceptions below, in which case it is
8771		 FIRST_CODE.  If the count is negative, FIRST_CODE should
8772		 always be used.  */
8773	      if (count > 0
8774		  && ((first_code == ROTATE && code == ASHIFT)
8775		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
8776		code = first_code;
8777	      else if (count < 0)
8778		code = first_code, count = - count;
8779
8780	      varop = XEXP (varop, 0);
8781	      continue;
8782	    }
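
	  /* As a QImode illustration of the general case just handled
	     (illustrative values): in (lshiftrt (ashift X 5) 2),
	     nonzero_bits (ashift X 5) is 0xE0 and shifting that mask right
	     by 2 gives 0x38; the directions differ, so COUNT = 2 - 5 = -3,
	     which flips the code, yielding (and (ashift X 3) 0x38).  Both
	     forms compute the same value.  */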
8783
8784	  /* If we have (A << B << C) for any shift, we can convert this to
8785	     (A << C << B).  This wins if A is a constant.  Only try this if
8786	     B is not a constant.  */
8787
8788	  else if (GET_CODE (varop) == code
8789		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
8790		   && 0 != (new
8791			    = simplify_binary_operation (code, mode,
8792							 XEXP (varop, 0),
8793							 GEN_INT (count))))
8794	    {
8795	      varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8796	      count = 0;
8797	      continue;
8798	    }
8799	  break;
8800
8801	case NOT:
8802	  /* Make this fit the case below.  */
8803	  varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8804				   GEN_INT (GET_MODE_MASK (mode)));
8805	  continue;
8806
8807	case IOR:
8808	case AND:
8809	case XOR:
8810	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8811	     with C the size of VAROP - 1 and the shift is logical if
8812	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8813	     we have an (le X 0) operation.  If we have an arithmetic shift
8814	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
8815	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
8816
8817	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8818	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8819	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8820	      && (code == LSHIFTRT || code == ASHIFTRT)
8821	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8822	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8823	    {
8824	      count = 0;
8825	      varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8826				       const0_rtx);
8827
8828	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8829		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8830
8831	      continue;
8832	    }
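
	  /* Concretely, in QImode with STORE_FLAG_VALUE == 1 (illustrative
	     values): for X = 5, ((5 - 1) | 5) = 5 has a clear sign bit and
	     (lshiftrt 5 7) = 0; for X = -3, ((-4) | -3) = -3 has the sign
	     bit set and the logical shift gives 1.  So the shift computes
	     (le X 0) exactly.  */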
8833
8834	  /* If we have (shift (logical)), move the logical to the outside
8835	     to allow it to possibly combine with another logical and the
8836	     shift to combine with another shift.  This also canonicalizes to
8837	     what a ZERO_EXTRACT looks like.  Also, some machines have
8838	     (and (shift)) insns.  */
8839
8840	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8841	      && (new = simplify_binary_operation (code, result_mode,
8842						   XEXP (varop, 1),
8843						   GEN_INT (count))) != 0
8844	      && GET_CODE (new) == CONST_INT
8845	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8846				  INTVAL (new), result_mode, &complement_p))
8847	    {
8848	      varop = XEXP (varop, 0);
8849	      continue;
8850	    }
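
	  /* For instance, (lshiftrt (and X 0xF0) 4) has just become
	     (and (lshiftrt X 4) 0x0F): the AND constant is shifted the
	     same way as X, and the logical operation is now outermost,
	     where it can merge with a surrounding AND or match a
	     ZERO_EXTRACT.  */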
8851
8852	  /* If we can't do that, try to simplify the shift in each arm of the
8853	     logical expression, make a new logical expression, and apply
8854	     the inverse distributive law.  */
8855	  {
8856	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8857					    XEXP (varop, 0), count);
8858	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8859					    XEXP (varop, 1), count);
8860
8861	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8862	    varop = apply_distributive_law (varop);
8863
8864	    count = 0;
8865	  }
8866	  break;
8867
8868	case EQ:
8869	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8870	     says that the sign bit can be tested, FOO has mode MODE, C is
8871	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8872	     that may be nonzero.  */
8873	  if (code == LSHIFTRT
8874	      && XEXP (varop, 1) == const0_rtx
8875	      && GET_MODE (XEXP (varop, 0)) == result_mode
8876	      && count == GET_MODE_BITSIZE (result_mode) - 1
8877	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8878	      && ((STORE_FLAG_VALUE
8879		   & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8880	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8881	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8882				  (HOST_WIDE_INT) 1, result_mode,
8883				  &complement_p))
8884	    {
8885	      varop = XEXP (varop, 0);
8886	      count = 0;
8887	      continue;
8888	    }
8889	  break;
8890
8891	case NEG:
8892	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8893	     than the number of bits in the mode is equivalent to A.  */
8894	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8895	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8896	    {
8897	      varop = XEXP (varop, 0);
8898	      count = 0;
8899	      continue;
8900	    }
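
	  /* E.g., in QImode: A = 1 gives (neg 1) = 0xFF and
	     (lshiftrt 0xFF 7) = 1, while A = 0 gives (lshiftrt 0 7) = 0;
	     either way the result is A itself.  */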
8901
8902	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
8903	     NEG outside to allow shifts to combine.  */
8904	  if (code == ASHIFT
8905	      && merge_outer_ops (&outer_op, &outer_const, NEG,
8906				  (HOST_WIDE_INT) 0, result_mode,
8907				  &complement_p))
8908	    {
8909	      varop = XEXP (varop, 0);
8910	      continue;
8911	    }
8912	  break;
8913
8914	case PLUS:
8915	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8916	     is one less than the number of bits in the mode is
8917	     equivalent to (xor A 1).  */
8918	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8919	      && XEXP (varop, 1) == constm1_rtx
8920	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8921	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8922				  (HOST_WIDE_INT) 1, result_mode,
8923				  &complement_p))
8924	    {
8925	      count = 0;
8926	      varop = XEXP (varop, 0);
8927	      continue;
8928	    }
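
	  /* E.g., in QImode: A = 0 gives (plus 0 -1) = 0xFF and
	     (lshiftrt 0xFF 7) = 1 = (xor 0 1), while A = 1 gives
	     (lshiftrt 0 7) = 0 = (xor 1 1).  */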
8929
8930	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8931	     that might be nonzero in BAR are those being shifted out and those
8932	     bits are known zero in FOO, we can replace the PLUS with FOO.
8933	     Similarly in the other operand order.  This code occurs when
8934	     we are computing the size of a variable-size array.  */
8935
8936	  if ((code == ASHIFTRT || code == LSHIFTRT)
8937	      && count < HOST_BITS_PER_WIDE_INT
8938	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8939	      && (nonzero_bits (XEXP (varop, 1), result_mode)
8940		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8941	    {
8942	      varop = XEXP (varop, 0);
8943	      continue;
8944	    }
8945	  else if ((code == ASHIFTRT || code == LSHIFTRT)
8946		   && count < HOST_BITS_PER_WIDE_INT
8947		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8948		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8949			    >> count)
8950		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8951			    & nonzero_bits (XEXP (varop, 1),
8952						 result_mode)))
8953	    {
8954	      varop = XEXP (varop, 1);
8955	      continue;
8956	    }
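
	  /* For instance, if X is known to have its low two bits zero and
	     R is known to be at most 3 (illustrative values), then in
	     (lshiftrt (plus X R) 2) the addition can never carry into the
	     surviving bits, so the result is just (lshiftrt X 2).  This is
	     the shape that variable-size-array size computations produce.  */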
8957
8958	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
8959	  if (code == ASHIFT
8960	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
8961	      && (new = simplify_binary_operation (ASHIFT, result_mode,
8962						   XEXP (varop, 1),
8963						   GEN_INT (count))) != 0
8964	      && GET_CODE (new) == CONST_INT
8965	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
8966				  INTVAL (new), result_mode, &complement_p))
8967	    {
8968	      varop = XEXP (varop, 0);
8969	      continue;
8970	    }
8971	  break;
8972
8973	case MINUS:
8974	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8975	     with C the size of VAROP - 1 and the shift is logical if
8976	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8977	     we have a (gt X 0) operation.  If the shift is arithmetic with
8978	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8979	     we have a (neg (gt X 0)) operation.  */
8980
8981	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8982	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8983	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8984	      && (code == LSHIFTRT || code == ASHIFTRT)
8985	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8986	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8987	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8988	    {
8989	      count = 0;
8990	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8991				       const0_rtx);
8992
8993	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8994		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8995
8996	      continue;
8997	    }
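
	  /* A QImode check with STORE_FLAG_VALUE == 1 (illustrative
	     values): X = 5 gives (ashiftrt 5 7) = 0 and 0 - 5 = -5, whose
	     logical shift by 7 is 1; X = -3 gives -1 - (-3) = 2, whose
	     logical shift is 0; X = 0 gives 0.  So the expression is 1
	     exactly when X > 0.  */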
8998	  break;
8999
9000	case TRUNCATE:
9001	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9002	     if the truncate does not affect the value.  */
9003	  if (code == LSHIFTRT
9004	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9005	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9006	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9007		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9008		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9009	    {
9010	      rtx varop_inner = XEXP (varop, 0);
9011
9012	      varop_inner = gen_rtx_combine (LSHIFTRT,
9013					     GET_MODE (varop_inner),
9014					     XEXP (varop_inner, 0),
9015					     GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
9016	      varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9017				       varop_inner);
9018	      count = 0;
9019	      continue;
9020	    }
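
	  /* E.g., (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 4) has
	     just become (truncate:SI (lshiftrt:DI X 36)): the inner shift
	     already moved the surviving bits below the truncation point,
	     so folding the two counts loses nothing.  */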
9021	  break;
9022
9023	default:
9024	  break;
9025	}
9026
9027      break;
9028    }
9029
9030  /* We need to determine what mode to do the shift in.  If the shift is
9031     a right shift or ROTATE, we must always do it in the mode it was
9032     originally done in.  Otherwise, we can do it in MODE, the widest mode
9033     encountered.  The code we care about is that of the shift that will
9034     actually be done, not the shift that was originally requested.  */
9035  shift_mode
9036    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9037       ? result_mode : mode);
9038
9039  /* We have now finished analyzing the shift.  The result should be
9040     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9041     OUTER_OP is non-NIL, it is an operation that needs to be applied
9042     to the result of the shift.  OUTER_CONST is the relevant constant,
9043     but we must turn off all bits turned off in the shift.
9044
9045     If we were passed a value for X, see if we can use any pieces of
9046     it.  If not, make new rtx.  */
9047
9048  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9049      && GET_CODE (XEXP (x, 1)) == CONST_INT
9050      && INTVAL (XEXP (x, 1)) == count)
9051    const_rtx = XEXP (x, 1);
9052  else
9053    const_rtx = GEN_INT (count);
9054
9055  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9056      && GET_MODE (XEXP (x, 0)) == shift_mode
9057      && SUBREG_REG (XEXP (x, 0)) == varop)
9058    varop = XEXP (x, 0);
9059  else if (GET_MODE (varop) != shift_mode)
9060    varop = gen_lowpart_for_combine (shift_mode, varop);
9061
9062  /* If we can't make the SUBREG, try to return what we were given.  */
9063  if (GET_CODE (varop) == CLOBBER)
9064    return x ? x : varop;
9065
9066  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9067  if (new != 0)
9068    x = new;
9069  else
9070    {
9071      if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9072	x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9073
9074      SUBST (XEXP (x, 0), varop);
9075      SUBST (XEXP (x, 1), const_rtx);
9076    }
9077
9078  /* If we have an outer operation and we just made a shift, it is
9079     possible that we could have simplified the shift were it not
9080     for the outer operation.  So try to do the simplification
9081     recursively.  */
9082
9083  if (outer_op != NIL && GET_CODE (x) == code
9084      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9085    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9086			      INTVAL (XEXP (x, 1)));
9087
9088  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9089     turn off all the bits that the shift would have turned off.  */
9090  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9091    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9092				GET_MODE_MASK (result_mode) >> orig_count);
9093
9094  /* Do the remainder of the processing in RESULT_MODE.  */
9095  x = gen_lowpart_for_combine (result_mode, x);
9096
9097  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9098     operation.  */
9099  if (complement_p)
9100    x = gen_unary (NOT, result_mode, result_mode, x);
9101
9102  if (outer_op != NIL)
9103    {
9104      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9105	{
9106	  int width = GET_MODE_BITSIZE (result_mode);
9107
9108	  outer_const &= GET_MODE_MASK (result_mode);
9109
9110	  /* If this would be an entire word for the target, but is not for
9111	     the host, then sign-extend on the host so that the number will
9112	     look the same way on the host that it would on the target.
9113
9114	     For example, when building a 64-bit-alpha-hosted, 32-bit-sparc-
9115	     targeted compiler, we want the 32-bit unsigned value -1 to be
9116	     represented as a 64-bit value -1, and not as 0x00000000ffffffff.
9117	     The latter confuses the sparc backend.  */
9118
9119	  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
9120	      && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
9121	    outer_const |= ((HOST_WIDE_INT) (-1) << width);
9122	}
9123
9124      if (outer_op == AND)
9125	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9126      else if (outer_op == SET)
9127	/* This means that we have determined that the result is
9128	   equivalent to a constant.  This should be rare.  */
9129	x = GEN_INT (outer_const);
9130      else if (GET_RTX_CLASS (outer_op) == '1')
9131	x = gen_unary (outer_op, result_mode, result_mode, x);
9132      else
9133	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9134    }
9135
9136  return x;
9137}
9138
9139/* Like recog, but we receive the address of a pointer to a new pattern.
9140   We try to match the rtx that the pointer points to.
9141   If that fails, we may try to modify or replace the pattern,
9142   storing the replacement into the same pointer object.
9143
9144   Modifications include deletion or addition of CLOBBERs.
9145
9146   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9147   the CLOBBERs are placed.
9148
9149   The value is the final insn code from the pattern ultimately matched,
9150   or -1.  */
9151
9152static int
9153recog_for_combine (pnewpat, insn, pnotes)
9154     rtx *pnewpat;
9155     rtx insn;
9156     rtx *pnotes;
9157{
9158  register rtx pat = *pnewpat;
9159  int insn_code_number;
9160  int num_clobbers_to_add = 0;
9161  int i;
9162  rtx notes = 0;
9163
9164  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9165     we use to indicate that something didn't match.  If we find such a
9166     thing, force rejection.  */
9167  if (GET_CODE (pat) == PARALLEL)
9168    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9169      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9170	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9171	return -1;
9172
9173  /* Is the result of combination a valid instruction?  */
9174  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9175
9176  /* If it isn't, there is the possibility that we previously had an insn
9177     that clobbered some register as a side effect, but the combined
9178     insn doesn't need to do that.  So try once more without the clobbers
9179     unless this represents an ASM insn.  */
9180
9181  if (insn_code_number < 0 && ! check_asm_operands (pat)
9182      && GET_CODE (pat) == PARALLEL)
9183    {
9184      int pos;
9185
9186      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9187	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9188	  {
9189	    if (i != pos)
9190	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9191	    pos++;
9192	  }
9193
9194      SUBST_INT (XVECLEN (pat, 0), pos);
9195
9196      if (pos == 1)
9197	pat = XVECEXP (pat, 0, 0);
9198
9199      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9200    }
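
  /* For instance, if the original insn was
     (parallel [(set R1 ...) (clobber R2)]) and the combined pattern no
     longer touches R2, the loop above squeezes out the CLOBBER, collapses
     a one-element PARALLEL to its single SET, and asks recog once more.  */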
9201
9202	  /* If we had any clobbers to add, make a new pattern that contains
9203     them.  Then check to make sure that all of them are dead.  */
9204  if (num_clobbers_to_add)
9205    {
9206      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9207				     gen_rtvec (GET_CODE (pat) == PARALLEL
9208						? XVECLEN (pat, 0) + num_clobbers_to_add
9209						: num_clobbers_to_add + 1));
9210
9211      if (GET_CODE (pat) == PARALLEL)
9212	for (i = 0; i < XVECLEN (pat, 0); i++)
9213	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9214      else
9215	XVECEXP (newpat, 0, 0) = pat;
9216
9217      add_clobbers (newpat, insn_code_number);
9218
9219      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9220	   i < XVECLEN (newpat, 0); i++)
9221	{
9222	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9223	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9224	    return -1;
9225	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9226				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9227	}
9228      pat = newpat;
9229    }
9230
9231  *pnewpat = pat;
9232  *pnotes = notes;
9233
9234  return insn_code_number;
9235}
9236
9237/* Like gen_lowpart but for use by combine.  In combine it is not possible
9238   to create any new pseudoregs.  However, it is safe to create
9239   invalid memory addresses, because combine will try to recognize
9240   them and all they will do is make the combine attempt fail.
9241
9242   If for some reason this cannot do its job, an rtx
9243   (clobber (const_int 0)) is returned.
9244   An insn containing that will not be recognized.  */
9245
9246#undef gen_lowpart
9247
9248static rtx
9249gen_lowpart_for_combine (mode, x)
9250     enum machine_mode mode;
9251     register rtx x;
9252{
9253  rtx result;
9254
9255  if (GET_MODE (x) == mode)
9256    return x;
9257
9258  /* We can only support MODE being wider than a word if X is a
9259     constant integer or has a mode the same size.  */
9260
9261  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9262      && ! ((GET_MODE (x) == VOIDmode
9263	     && (GET_CODE (x) == CONST_INT
9264		 || GET_CODE (x) == CONST_DOUBLE))
9265	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9266    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9267
9268  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
9269     won't know what to do.  So we will strip off the SUBREG here and
9270     process normally.  */
9271  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9272    {
9273      x = SUBREG_REG (x);
9274      if (GET_MODE (x) == mode)
9275	return x;
9276    }
9277
9278  result = gen_lowpart_common (mode, x);
9279  if (result != 0
9280      && GET_CODE (result) == SUBREG
9281      && GET_CODE (SUBREG_REG (result)) == REG
9282      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9283      && (GET_MODE_SIZE (GET_MODE (result))
9284	  != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
9285    REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
9286
9287  if (result)
9288    return result;
9289
9290  if (GET_CODE (x) == MEM)
9291    {
9292      register int offset = 0;
9293      rtx new;
9294
9295      /* Refuse to work on a volatile memory ref or one with a mode-dependent
9296	 address.  */
9297      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9298	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9299
9300      /* If we want to refer to something bigger than the original memref,
9301	 generate a perverse subreg instead.  That will force a reload
9302	 of the original memref X.  */
9303      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9304	return gen_rtx_SUBREG (mode, x, 0);
9305
9306      if (WORDS_BIG_ENDIAN)
9307	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9308		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9309      if (BYTES_BIG_ENDIAN)
9310	{
9311	  /* Adjust the address so that the address-after-the-data is
9312	     unchanged.  */
9313	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9314		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9315	}
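
      /* E.g., taking the QImode lowpart of a DImode memref on a
	 big-endian target with 4-byte words (illustrative parameters):
	 the word adjustment gives MAX (8, 4) - MAX (1, 4) = 4, the byte
	 adjustment adds MIN (4, 8) - MIN (4, 1) = 3 more, and the
	 resulting offset of 7 addresses the least significant byte, as
	 it must.  */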
9316      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
9317      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
9318      MEM_COPY_ATTRIBUTES (new, x);
9319      return new;
9320    }
9321
9322  /* If X is a comparison operator, rewrite it in a new mode.  This
9323     probably won't match, but may allow further simplifications.  */
9324  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9325    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9326
9327  /* If we couldn't simplify X any other way, just enclose it in a
9328     SUBREG.  Normally, this SUBREG won't match, but some patterns may
9329     include an explicit SUBREG or we may simplify it further in combine.  */
9330  else
9331    {
9332      int word = 0;
9333
9334      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9335	word = ((GET_MODE_SIZE (GET_MODE (x))
9336		 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9337		/ UNITS_PER_WORD);
9338      return gen_rtx_SUBREG (mode, x, word);
9339    }
9340}
9341
9342/* Make an rtx expression.  This is a subset of gen_rtx and only supports
9343   expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9344
9345   If the identical expression was previously in the insn (in the undobuf),
9346   it will be returned.  Only if it is not found will a new expression
9347   be made.  */
9348
9349/*VARARGS2*/
9350static rtx
9351gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
9352{
9353#ifndef ANSI_PROTOTYPES
9354  enum rtx_code code;
9355  enum machine_mode mode;
9356#endif
9357  va_list p;
9358  int n_args;
9359  rtx args[3];
9360  int j;
9361  char *fmt;
9362  rtx rt;
9363  struct undo *undo;
9364
9365  VA_START (p, mode);
9366
9367#ifndef ANSI_PROTOTYPES
9368  code = va_arg (p, enum rtx_code);
9369  mode = va_arg (p, enum machine_mode);
9370#endif
9371
9372  n_args = GET_RTX_LENGTH (code);
9373  fmt = GET_RTX_FORMAT (code);
9374
9375  if (n_args == 0 || n_args > 3)
9376    abort ();
9377
9378  /* Get each arg and verify that it is supposed to be an expression.  */
9379  for (j = 0; j < n_args; j++)
9380    {
9381      if (*fmt++ != 'e')
9382	abort ();
9383
9384      args[j] = va_arg (p, rtx);
9385    }
9386
9387  /* See if this is in undobuf.  Be sure we don't use objects that came
9388     from another insn; this could produce circular rtl structures.  */
9389
9390  for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9391    if (!undo->is_int
9392	&& GET_CODE (undo->old_contents.r) == code
9393	&& GET_MODE (undo->old_contents.r) == mode)
9394      {
9395	for (j = 0; j < n_args; j++)
9396	  if (XEXP (undo->old_contents.r, j) != args[j])
9397	    break;
9398
9399	if (j == n_args)
9400	  return undo->old_contents.r;
9401      }
9402
9403  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
9404     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
9405  rt = rtx_alloc (code);
9406  PUT_MODE (rt, mode);
9407  XEXP (rt, 0) = args[0];
9408  if (n_args > 1)
9409    {
9410      XEXP (rt, 1) = args[1];
9411      if (n_args > 2)
9412	XEXP (rt, 2) = args[2];
9413    }
9414  return rt;
9415}
9416
9417/* These routines make binary and unary operations by first seeing if they
9418   fold; if not, a new expression is allocated.  */
9419
9420static rtx
9421gen_binary (code, mode, op0, op1)
9422     enum rtx_code code;
9423     enum machine_mode mode;
9424     rtx op0, op1;
9425{
9426  rtx result;
9427  rtx tem;
9428
9429  if (GET_RTX_CLASS (code) == 'c'
9430      && (GET_CODE (op0) == CONST_INT
9431	  || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9432    tem = op0, op0 = op1, op1 = tem;
9433
9434  if (GET_RTX_CLASS (code) == '<')
9435    {
9436      enum machine_mode op_mode = GET_MODE (op0);
9437
9438      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9439	 just (REL_OP X Y).  */
9440      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9441	{
9442	  op1 = XEXP (op0, 1);
9443	  op0 = XEXP (op0, 0);
9444	  op_mode = GET_MODE (op0);
9445	}
9446
9447      if (op_mode == VOIDmode)
9448	op_mode = GET_MODE (op1);
9449      result = simplify_relational_operation (code, op_mode, op0, op1);
9450    }
9451  else
9452    result = simplify_binary_operation (code, mode, op0, op1);
9453
9454  if (result)
9455    return result;
9456
9457  /* Put complex operands first and constants second.  */
9458  if (GET_RTX_CLASS (code) == 'c'
9459      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9460	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9461	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9462	  || (GET_CODE (op0) == SUBREG
9463	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9464	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9465    return gen_rtx_combine (code, mode, op1, op0);
9466
9467  /* If we are turning off bits already known off in OP0, we need not do
9468     an AND.  */
9469  else if (code == AND && GET_CODE (op1) == CONST_INT
9470	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9471	   && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9472    return op0;
9473
9474  return gen_rtx_combine (code, mode, op0, op1);
9475}
9476
9477static rtx
9478gen_unary (code, mode, op0_mode, op0)
9479     enum rtx_code code;
9480     enum machine_mode mode, op0_mode;
9481     rtx op0;
9482{
9483  rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
9484
9485  if (result)
9486    return result;
9487
9488  return gen_rtx_combine (code, mode, op0);
9489}
9490
9491/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9492   comparison code that will be tested.
9493
9494   The result is a possibly different comparison code to use.  *POP0 and
9495   *POP1 may be updated.
9496
9497   It is possible that we might detect that a comparison is either always
9498   true or always false.  However, we do not perform general constant
9499   folding in combine, so this knowledge isn't useful.  Such tautologies
9500   should have been detected earlier.  Hence we ignore all such cases.  */
9501
9502static enum rtx_code
9503simplify_comparison (code, pop0, pop1)
9504     enum rtx_code code;
9505     rtx *pop0;
9506     rtx *pop1;
9507{
9508  rtx op0 = *pop0;
9509  rtx op1 = *pop1;
9510  rtx tem, tem1;
9511  int i;
9512  enum machine_mode mode, tmode;
9513
9514  /* Try a few ways of applying the same transformation to both operands.  */
9515  while (1)
9516    {
9517#ifndef WORD_REGISTER_OPERATIONS
9518      /* The test below this one won't handle SIGN_EXTENDs on these machines,
9519	 so check specially.  */
9520      if (code != GTU && code != GEU && code != LTU && code != LEU
9521	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9522	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
9523	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
9524	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9525	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9526	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9527	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9528	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9529	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
9530	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9531	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9532	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9533	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9534	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9535	  && (INTVAL (XEXP (op0, 1))
9536	      == (GET_MODE_BITSIZE (GET_MODE (op0))
9537		  - (GET_MODE_BITSIZE
9538		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9539	{
9540	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9541	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9542	}
9543#endif
9544
9545      /* If both operands are the same constant shift, see if we can ignore the
9546	 shift.  We can if the shift is a rotate or if the bits shifted out of
9547	 this shift are known to be zero for both inputs and if the type of
9548	 comparison is compatible with the shift.  */
9549      if (GET_CODE (op0) == GET_CODE (op1)
9550	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9551	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9552	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9553		  && (code != GT && code != LT && code != GE && code != LE))
9554	      || (GET_CODE (op0) == ASHIFTRT
9555		  && (code != GTU && code != LTU
9556		      && code != GEU && code != LEU)))
9557	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9558	  && INTVAL (XEXP (op0, 1)) >= 0
9559	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9560	  && XEXP (op0, 1) == XEXP (op1, 1))
9561	{
9562	  enum machine_mode mode = GET_MODE (op0);
9563	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9564	  int shift_count = INTVAL (XEXP (op0, 1));
9565
9566	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9567	    mask &= (mask >> shift_count) << shift_count;
9568	  else if (GET_CODE (op0) == ASHIFT)
9569	    mask = (mask & (mask << shift_count)) >> shift_count;
9570
9571	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9572	      && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
9573	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9574	  else
9575	    break;
9576	}
9577
9578      /* If both operands are AND's of a paradoxical SUBREG by constant, the
9579	 SUBREGs are of the same mode, and, in both cases, the AND would
9580	 be redundant if the comparison was done in the narrower mode,
9581	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9582	 and the operand's possibly nonzero bits are 0xffffff01; in that case
9583	 if we only care about QImode, we don't need the AND).  This case
9584	 occurs if the output mode of an scc insn is not SImode and
9585	 STORE_FLAG_VALUE == 1 (e.g., the 386).
9586
9587	 Similarly, check for a case where the AND's are ZERO_EXTEND
9588	 operations from some narrower mode even though a SUBREG is not
9589	 present.  */
9590
9591      else if  (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9592		&& GET_CODE (XEXP (op0, 1)) == CONST_INT
9593		&& GET_CODE (XEXP (op1, 1)) == CONST_INT)
9594	{
9595	  rtx inner_op0 = XEXP (op0, 0);
9596	  rtx inner_op1 = XEXP (op1, 0);
9597	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9598	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9599	  int changed = 0;
9600
9601	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9602	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
9603		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9604	      && (GET_MODE (SUBREG_REG (inner_op0))
9605		  == GET_MODE (SUBREG_REG (inner_op1)))
9606	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9607		  <= HOST_BITS_PER_WIDE_INT)
9608	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9609					     GET_MODE (SUBREG_REG (inner_op0)))))
9610	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9611					     GET_MODE (SUBREG_REG (inner_op1))))))
9612	    {
9613	      op0 = SUBREG_REG (inner_op0);
9614	      op1 = SUBREG_REG (inner_op1);
9615
9616	      /* The resulting comparison is always unsigned since we masked
9617		 off the original sign bit.  */
9618	      code = unsigned_condition (code);
9619
9620	      changed = 1;
9621	    }
9622
9623	  else if (c0 == c1)
9624	    for (tmode = GET_CLASS_NARROWEST_MODE
9625		 (GET_MODE_CLASS (GET_MODE (op0)));
9626		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9627	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9628		{
9629		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
9630		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
9631		  code = unsigned_condition (code);
9632		  changed = 1;
9633		  break;
9634		}
9635
9636	  if (! changed)
9637	    break;
9638	}
9639
9640      /* If both operands are NOT, we can strip off the outer operation
9641	 and adjust the comparison code for swapped operands; similarly for
9642	 NEG, except that this must be an equality comparison.  */
9643      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9644	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9645		   && (code == EQ || code == NE)))
9646	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9647
9648      else
9649	break;
9650    }
9651
9652  /* If the first operand is a constant, swap the operands and adjust the
9653     comparison code appropriately, but don't do this if the second operand
9654     is already a constant integer.  */
9655  if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9656    {
9657      tem = op0, op0 = op1, op1 = tem;
9658      code = swap_condition (code);
9659    }
9660
9661  /* We now enter a loop during which we will try to simplify the comparison.
9662     For the most part, we only are concerned with comparisons with zero,
9663     but some things may really be comparisons with zero but not start
9664     out looking that way.  */
9665
9666  while (GET_CODE (op1) == CONST_INT)
9667    {
9668      enum machine_mode mode = GET_MODE (op0);
9669      int mode_width = GET_MODE_BITSIZE (mode);
9670      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9671      int equality_comparison_p;
9672      int sign_bit_comparison_p;
9673      int unsigned_comparison_p;
9674      HOST_WIDE_INT const_op;
9675
9676      /* We only want to handle integral modes.  This catches VOIDmode,
9677	 CCmode, and the floating-point modes.  An exception is that we
9678	 can handle VOIDmode if OP0 is a COMPARE or a comparison
9679	 operation.  */
9680
9681      if (GET_MODE_CLASS (mode) != MODE_INT
9682	  && ! (mode == VOIDmode
9683		&& (GET_CODE (op0) == COMPARE
9684		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9685	break;
9686
9687      /* Get the constant we are comparing against and turn off all bits
9688	 not on in our mode.  */
9689      const_op = INTVAL (op1);
9690      if (mode_width <= HOST_BITS_PER_WIDE_INT)
9691	const_op &= mask;
9692
9693      /* If we are comparing against a constant power of two and the value
9694	 being compared can only have that single bit nonzero (e.g., it was
9695	 `and'ed with that bit), we can replace this with a comparison
9696	 with zero.  */
9697      if (const_op
9698	  && (code == EQ || code == NE || code == GE || code == GEU
9699	      || code == LT || code == LTU)
9700	  && mode_width <= HOST_BITS_PER_WIDE_INT
9701	  && exact_log2 (const_op) >= 0
9702	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
9703	{
9704	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9705	  op1 = const0_rtx, const_op = 0;
9706	}
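
      /* E.g., if OP0 is (and X 8), whose only possibly nonzero bit is
	 bit 3, then (eq OP0 8) has just become (ne OP0 0) and
	 (ne OP0 8) has become (eq OP0 0): a single bit is either present
	 or absent, so comparing against the bit is the same as testing
	 for nonzero.  */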
9707
9708      /* Similarly, if we are comparing a value known to be either -1 or
9709	 0 with -1, change it to the opposite comparison against zero.  */
9710
9711      if (const_op == -1
9712	  && (code == EQ || code == NE || code == GT || code == LE
9713	      || code == GEU || code == LTU)
9714	  && num_sign_bit_copies (op0, mode) == mode_width)
9715	{
9716	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9717	  op1 = const0_rtx, const_op = 0;
9718	}
9719
9720      /* Do some canonicalizations based on the comparison code.  We prefer
9721	 comparisons against zero and then prefer equality comparisons.
9722	 If we can reduce the size of a constant, we will do that too.  */
9723
9724      switch (code)
9725	{
9726	case LT:
9727	  /* < C is equivalent to <= (C - 1) */
9728	  if (const_op > 0)
9729	    {
9730	      const_op -= 1;
9731	      op1 = GEN_INT (const_op);
9732	      code = LE;
9733	      /* ... fall through to LE case below.  */
9734	    }
9735	  else
9736	    break;
9737
9738	case LE:
9739	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
9740	  if (const_op < 0)
9741	    {
9742	      const_op += 1;
9743	      op1 = GEN_INT (const_op);
9744	      code = LT;
9745	    }
9746
9747	  /* If we are doing a <= 0 comparison on a value known to have
9748	     a zero sign bit, we can replace this with == 0.  */
9749	  else if (const_op == 0
9750		   && mode_width <= HOST_BITS_PER_WIDE_INT
9751		   && (nonzero_bits (op0, mode)
9752		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9753	    code = EQ;
9754	  break;
9755
9756	case GE:
9757	  /* >= C is equivalent to > (C - 1).  */
9758	  if (const_op > 0)
9759	    {
9760	      const_op -= 1;
9761	      op1 = GEN_INT (const_op);
9762	      code = GT;
9763	      /* ... fall through to GT below.  */
9764	    }
9765	  else
9766	    break;
9767
9768	case GT:
9769	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
9770	  if (const_op < 0)
9771	    {
9772	      const_op += 1;
9773	      op1 = GEN_INT (const_op);
9774	      code = GE;
9775	    }
9776
9777	  /* If we are doing a > 0 comparison on a value known to have
9778	     a zero sign bit, we can replace this with != 0.  */
9779	  else if (const_op == 0
9780		   && mode_width <= HOST_BITS_PER_WIDE_INT
9781		   && (nonzero_bits (op0, mode)
9782		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9783	    code = NE;
9784	  break;
9785
9786	case LTU:
9787	  /* < C is equivalent to <= (C - 1).  */
9788	  if (const_op > 0)
9789	    {
9790	      const_op -= 1;
9791	      op1 = GEN_INT (const_op);
9792	      code = LEU;
9793	      /* ... fall through ...  */
9794	    }
9795
9796	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
9797	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9798		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9799	    {
9800	      const_op = 0, op1 = const0_rtx;
9801	      code = GE;
9802	      break;
9803	    }
9804	  else
9805	    break;
9806
9807	case LEU:
9808	  /* unsigned <= 0 is equivalent to == 0 */
9809	  if (const_op == 0)
9810	    code = EQ;
9811
9812	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
9813	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9814		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9815	    {
9816	      const_op = 0, op1 = const0_rtx;
9817	      code = GE;
9818	    }
9819	  break;
9820
9821	case GEU:
9822	  /* >= C is equivalent to > (C - 1).  */
9823	  if (const_op > 1)
9824	    {
9825	      const_op -= 1;
9826	      op1 = GEN_INT (const_op);
9827	      code = GTU;
9828	      /* ... fall through ...  */
9829	    }
9830
9831	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
9832	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9833		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9834	    {
9835	      const_op = 0, op1 = const0_rtx;
9836	      code = LT;
9837	      break;
9838	    }
9839	  else
9840	    break;
9841
9842	case GTU:
9843	  /* unsigned > 0 is equivalent to != 0 */
9844	  if (const_op == 0)
9845	    code = NE;
9846
9847	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
9848	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9849		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9850	    {
9851	      const_op = 0, op1 = const0_rtx;
9852	      code = LT;
9853	    }
9854	  break;
9855
9856	default:
9857	  break;
9858	}
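
      /* E.g., (lt X 5) has just become (le X 4); in QImode, (geu X 0x80)
	 has become (lt X 0) and (leu X 0x7f) has become (ge X 0), turning
	 unsigned range checks into sign-bit tests that the cases below
	 know how to simplify further.  */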
9859
9860      /* Compute some predicates to simplify code below.  */
9861
9862      equality_comparison_p = (code == EQ || code == NE);
9863      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9864      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9865			       || code == LEU);
9866
9867      /* If this is a sign bit comparison and we can do arithmetic in
9868	 MODE, say that we will only be needing the sign bit of OP0.  */
9869      if (sign_bit_comparison_p
9870	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9871	op0 = force_to_mode (op0, mode,
9872			     ((HOST_WIDE_INT) 1
9873			      << (GET_MODE_BITSIZE (mode) - 1)),
9874			     NULL_RTX, 0);
9875
9876      /* Now try cases based on the opcode of OP0.  If none of the cases
9877	 does a "continue", we exit this loop immediately after the
9878	 switch.  */
9879
9880      switch (GET_CODE (op0))
9881	{
9882	case ZERO_EXTRACT:
9883	  /* If we are extracting a single bit from a variable position in
9884	     a constant that has only a single bit set and are comparing it
9885	     with zero, we can convert this into an equality comparison
9886	     between the position and the location of the single bit.  */
9887
9888	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9889	      && XEXP (op0, 1) == const1_rtx
9890	      && equality_comparison_p && const_op == 0
9891	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
9892	    {
9893	      if (BITS_BIG_ENDIAN)
9894		{
9895#ifdef HAVE_extzv
9896		  mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
9897		  if (mode == VOIDmode)
9898		    mode = word_mode;
9899		  i = (GET_MODE_BITSIZE (mode) - 1 - i);
9900#else
9901	          i = BITS_PER_WORD - 1 - i;
9902#endif
9903		}
9904
9905	      op0 = XEXP (op0, 2);
9906	      op1 = GEN_INT (i);
9907	      const_op = i;
9908
9909	      /* Result is nonzero iff shift count is equal to I.  */
9910	      code = reverse_condition (code);
9911	      continue;
9912	    }
9913
9914	  /* ... fall through ...  */
9915
9916	case SIGN_EXTRACT:
9917	  tem = expand_compound_operation (op0);
9918	  if (tem != op0)
9919	    {
9920	      op0 = tem;
9921	      continue;
9922	    }
9923	  break;
9924
9925	case NOT:
9926	  /* If testing for equality, we can take the NOT of the constant.  */
9927	  if (equality_comparison_p
9928	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9929	    {
9930	      op0 = XEXP (op0, 0);
9931	      op1 = tem;
9932	      continue;
9933	    }
9934
9935	  /* If just looking at the sign bit, reverse the sense of the
9936	     comparison.  */
9937	  if (sign_bit_comparison_p)
9938	    {
9939	      op0 = XEXP (op0, 0);
9940	      code = (code == GE ? LT : GE);
9941	      continue;
9942	    }
9943	  break;
9944
9945	case NEG:
9946	  /* If testing for equality, we can take the NEG of the constant.  */
9947	  if (equality_comparison_p
9948	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9949	    {
9950	      op0 = XEXP (op0, 0);
9951	      op1 = tem;
9952	      continue;
9953	    }
9954
9955	  /* The remaining cases only apply to comparisons with zero.  */
9956	  if (const_op != 0)
9957	    break;
9958
9959	  /* When X is ABS or is known positive,
9960	     (neg X) is < 0 if and only if X != 0.  */
9961
9962	  if (sign_bit_comparison_p
9963	      && (GET_CODE (XEXP (op0, 0)) == ABS
9964		  || (mode_width <= HOST_BITS_PER_WIDE_INT
9965		      && (nonzero_bits (XEXP (op0, 0), mode)
9966			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9967	    {
9968	      op0 = XEXP (op0, 0);
9969	      code = (code == LT ? NE : EQ);
9970	      continue;
9971	    }
9972
9973	  /* If we have NEG of something whose two high-order bits are the
9974	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
9975	  if (num_sign_bit_copies (op0, mode) >= 2)
9976	    {
9977	      op0 = XEXP (op0, 0);
9978	      code = swap_condition (code);
9979	      continue;
9980	    }
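
	  /* Requiring two sign-bit copies excludes the most negative
	     value, for which (neg X) == X and swapping the comparison
	     would be wrong: in QImode, X = -128 has only one sign-bit
	     copy and (neg -128) is still -128.  */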
9981	  break;
9982
9983	case ROTATE:
9984	  /* If we are testing equality and our count is a constant, we
9985	     can perform the inverse operation on our RHS.  */
9986	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9987	      && (tem = simplify_binary_operation (ROTATERT, mode,
9988						   op1, XEXP (op0, 1))) != 0)
9989	    {
9990	      op0 = XEXP (op0, 0);
9991	      op1 = tem;
9992	      continue;
9993	    }
9994
9995	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9996	     a particular bit.  Convert it to an AND of a constant of that
9997	     bit.  This will be converted into a ZERO_EXTRACT.  */
9998	  if (const_op == 0 && sign_bit_comparison_p
9999	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10000	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10001	    {
10002	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10003					    ((HOST_WIDE_INT) 1
10004					     << (mode_width - 1
10005						 - INTVAL (XEXP (op0, 1)))));
10006	      code = (code == LT ? NE : EQ);
10007	      continue;
10008	    }
10009
10010	  /* ... fall through ...  */
10011
10012	case ABS:
10013	  /* ABS is ignorable inside an equality comparison with zero.  */
10014	  if (const_op == 0 && equality_comparison_p)
10015	    {
10016	      op0 = XEXP (op0, 0);
10017	      continue;
10018	    }
10019	  break;
10020
10021
10022	case SIGN_EXTEND:
10023	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10024	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10025	     are either testing inequality or have an unsigned comparison
10026	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
10027	  if (! unsigned_comparison_p
10028	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10029		  <= HOST_BITS_PER_WIDE_INT)
10030	      && ((unsigned HOST_WIDE_INT) const_op
10031		  < (((unsigned HOST_WIDE_INT) 1
10032		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10033	    {
10034	      op0 = XEXP (op0, 0);
10035	      continue;
10036	    }
10037	  break;
10038
10039	case SUBREG:
10040	  /* Check for the case where we are comparing A - C1 with C2,
10041	     both constants are smaller than 1/2 the maximum positive
10042	     value in MODE, and the comparison is equality or unsigned.
10043	     In that case, if A is either zero-extended to MODE or has
10044	     sufficient sign bits so that the high-order bit in MODE
10045	     is a copy of the sign in the inner mode, we can prove that it is
10046	     safe to do the operation in the wider mode.  This simplifies
10047	     many range checks.  */
10048
10049	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10050	      && subreg_lowpart_p (op0)
10051	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10052	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10053	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10054	      && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
10055		  < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10056	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10057	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10058				      GET_MODE (SUBREG_REG (op0)))
10059			& ~ GET_MODE_MASK (mode))
10060		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10061					   GET_MODE (SUBREG_REG (op0)))
10062		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10063			 - GET_MODE_BITSIZE (mode)))))
10064	    {
10065	      op0 = SUBREG_REG (op0);
10066	      continue;
10067	    }
10068
10069	  /* If the inner mode is narrower and we are extracting the low part,
10070	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10071	  if (subreg_lowpart_p (op0)
10072	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10073	    /* Fall through */ ;
10074	  else
10075	    break;
10076
10077	  /* ... fall through ...  */
10078
10079	case ZERO_EXTEND:
10080	  if ((unsigned_comparison_p || equality_comparison_p)
10081	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10082		  <= HOST_BITS_PER_WIDE_INT)
10083	      && ((unsigned HOST_WIDE_INT) const_op
10084		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10085	    {
10086	      op0 = XEXP (op0, 0);
10087	      continue;
10088	    }
10089	  break;
10090
10091	case PLUS:
10092	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10093	     this for equality comparisons due to pathological cases involving
10094	     overflows.  */
10095	  if (equality_comparison_p
10096	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10097							op1, XEXP (op0, 1))))
10098	    {
10099	      op0 = XEXP (op0, 0);
10100	      op1 = tem;
10101	      continue;
10102	    }
10103
10104	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10105	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10106	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10107	    {
10108	      op0 = XEXP (XEXP (op0, 0), 0);
10109	      code = (code == LT ? EQ : NE);
10110	      continue;
10111	    }
10112	  break;
10113
10114	case MINUS:
10115	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10116	     (eq B (minus A C)), whichever simplifies.  We can only do
10117	     this for equality comparisons due to pathological cases involving
10118	     overflows.  */
10119	  if (equality_comparison_p
10120	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10121							XEXP (op0, 1), op1)))
10122	    {
10123	      op0 = XEXP (op0, 0);
10124	      op1 = tem;
10125	      continue;
10126	    }
10127
10128	  if (equality_comparison_p
10129	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10130							XEXP (op0, 0), op1)))
10131	    {
10132	      op0 = XEXP (op0, 1);
10133	      op1 = tem;
10134	      continue;
10135	    }
10136
10137	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10138	     of bits in X minus 1, is one iff X > 0.  */
10139	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10140	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10141	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10142	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10143	    {
10144	      op0 = XEXP (op0, 1);
10145	      code = (code == GE ? LE : GT);
10146	      continue;
10147	    }
10148	  break;
10149
10150	case XOR:
10151	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10152	     if C is zero or B is a constant.  */
10153	  if (equality_comparison_p
10154	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10155							XEXP (op0, 1), op1)))
10156	    {
10157	      op0 = XEXP (op0, 0);
10158	      op1 = tem;
10159	      continue;
10160	    }
10161	  break;
10162
10163	case EQ:  case NE:
10164	case LT:  case LTU:  case LE:  case LEU:
10165	case GT:  case GTU:  case GE:  case GEU:
10166	  /* We can't do anything if OP0 is a condition code value, rather
10167	     than an actual data value.  */
10168	  if (const_op != 0
10169#ifdef HAVE_cc0
10170	      || XEXP (op0, 0) == cc0_rtx
10171#endif
10172	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10173	    break;
10174
10175	  /* Get the two operands being compared.  */
10176	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10177	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10178	  else
10179	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10180
10181	  /* Check for the cases where we simply want the result of the
10182	     earlier test or the opposite of that result.  */
10183	  if (code == NE
10184	      || (code == EQ && reversible_comparison_p (op0))
10185	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10186		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10187		  && (STORE_FLAG_VALUE
10188		      & (((HOST_WIDE_INT) 1
10189			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10190		  && (code == LT
10191		      || (code == GE && reversible_comparison_p (op0)))))
10192	    {
10193	      code = (code == LT || code == NE
10194		      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10195	      op0 = tem, op1 = tem1;
10196	      continue;
10197	    }
10198	  break;
10199
10200	case IOR:
10201	  /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10202	     iff X <= 0.  */
10203	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10204	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10205	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10206	    {
10207	      op0 = XEXP (op0, 1);
10208	      code = (code == GE ? GT : LE);
10209	      continue;
10210	    }
10211	  break;
10212
10213	case AND:
10214	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10215	     will be converted to a ZERO_EXTRACT later.  */
10216	  if (const_op == 0 && equality_comparison_p
10217	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10218	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10219	    {
10220	      op0 = simplify_and_const_int
10221		(op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10222					     XEXP (op0, 1),
10223					     XEXP (XEXP (op0, 0), 1)),
10224		 (HOST_WIDE_INT) 1);
10225	      continue;
10226	    }
10227
10228	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10229	     zero and X is a comparison and C1 and C2 describe only bits set
10230	     in STORE_FLAG_VALUE, we can compare with X.  */
10231	  if (const_op == 0 && equality_comparison_p
10232	      && mode_width <= HOST_BITS_PER_WIDE_INT
10233	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10234	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10235	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10236	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10237	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10238	    {
10239	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10240		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10241	      if ((~ STORE_FLAG_VALUE & mask) == 0
10242		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10243		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10244			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10245		{
10246		  op0 = XEXP (XEXP (op0, 0), 0);
10247		  continue;
10248		}
10249	    }
10250
10251	  /* If we are doing an equality comparison of an AND of a bit equal
10252	     to the sign bit, replace this with a LT or GE comparison of
10253	     the underlying value.  */
10254	  if (equality_comparison_p
10255	      && const_op == 0
10256	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10257	      && mode_width <= HOST_BITS_PER_WIDE_INT
10258	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10259		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10260	    {
10261	      op0 = XEXP (op0, 0);
10262	      code = (code == EQ ? GE : LT);
10263	      continue;
10264	    }
10265
10266	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10267	     mode, the constant fits within that mode, and this is either an
10268	     equality or unsigned comparison, try to do this comparison in
10269	     the narrower mode.  */
10270	  if ((equality_comparison_p || unsigned_comparison_p)
10271	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10272	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10273				   & GET_MODE_MASK (mode))
10274				  + 1)) >= 0
10275	      && const_op >> i == 0
10276	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10277	    {
10278	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10279	      continue;
10280	    }
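
	  /* E.g., (and:SI X 0xFF) compared against 37: the AND acts as a
	     ZERO_EXTEND from QImode and 37 fits in QImode, so this has
	     just become a QImode comparison of (subreg:QI X) with 37,
	     which may match a byte-compare pattern.  */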
10281
10282	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10283	     in both M1 and M2 and the SUBREG is either paradoxical or
10284	     represents the low part, permute the SUBREG and the AND and
10285	     try again.  */
10286	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
10287	      && ((mode_width
10288		   >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10289#ifdef WORD_REGISTER_OPERATIONS
10290		  || subreg_lowpart_p (XEXP (op0, 0))
10291#endif
10292		  )
10293#ifndef WORD_REGISTER_OPERATIONS
10294	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10295		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10296		 As originally written the upper bits have a defined value
10297		 due to the AND operation.  However, if we commute the AND
10298		 inside the SUBREG then they no longer have defined values
10299		 and the meaning of the code has been changed.  */
10300	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10301		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10302#endif
10303	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10304	      && mode_width <= HOST_BITS_PER_WIDE_INT
10305	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10306		  <= HOST_BITS_PER_WIDE_INT)
10307	      && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10308	      && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10309		       & INTVAL (XEXP (op0, 1)))
10310	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10311	      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10312		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10313
10314	    {
10315	      op0
10316		= gen_lowpart_for_combine
10317		  (mode,
10318		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10319			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10320	      continue;
10321	    }
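	  /* Illustrative sketch (for a WORD_REGISTER_OPERATIONS target):
	     (and:SI (subreg:SI (reg:HI X) 0) 0x7f) is rewritten as the low
	     part of (and:HI (reg:HI X) 0x7f); 0x7f fits in both modes and
	     is neither mode's full mask, so the rewrite is safe.  */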
10322
10323	  break;
10324
10325	case ASHIFT:
10326	  /* If we have (compare (ashift FOO N) (const_int C)) and
10327	     the high order N bits of FOO (N+1 if an inequality comparison)
10328	     are known to be zero, we can do this by comparing FOO with C
10329	     shifted right N bits so long as the low-order N bits of C are
10330	     zero.  */
10331	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10332	      && INTVAL (XEXP (op0, 1)) >= 0
10333	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10334		  < HOST_BITS_PER_WIDE_INT)
10335	      && ((const_op
10336		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10337	      && mode_width <= HOST_BITS_PER_WIDE_INT
10338	      && (nonzero_bits (XEXP (op0, 0), mode)
10339		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
10340				+ ! equality_comparison_p))) == 0)
10341	    {
10342	      const_op >>= INTVAL (XEXP (op0, 1));
10343	      op1 = GEN_INT (const_op);
10344	      op0 = XEXP (op0, 0);
10345	      continue;
10346	    }
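	  /* Worked example: (compare (ashift FOO 2) (const_int 8)) with the
	     high 2 bits of FOO (3 for an inequality) known zero becomes
	     (compare FOO (const_int 2)), since the low 2 bits of 8 are zero
	     and 8 >> 2 == 2.  */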
10347
10348	  /* If we are doing a sign bit comparison, it means we are testing
10349	     a particular bit.  Convert it to the appropriate AND.  */
10350	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10351	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10352	    {
10353	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10354					    ((HOST_WIDE_INT) 1
10355					     << (mode_width - 1
10356						 - INTVAL (XEXP (op0, 1)))));
10357	      code = (code == LT ? NE : EQ);
10358	      continue;
10359	    }
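	  /* E.g., in QImode (lt (ashift X 3) 0) tests bit 8 - 1 - 3 == 4 of
	     X, so it becomes (ne (and X 16) 0); a GE test similarly becomes
	     EQ.  */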
10360
10361	  /* If this is an equality comparison with zero and we are shifting
10362	     the low bit to the sign bit, we can convert this to an AND of the
10363	     low-order bit.  */
10364	  if (const_op == 0 && equality_comparison_p
10365	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10366	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10367	    {
10368	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10369					    (HOST_WIDE_INT) 1);
10370	      continue;
10371	    }
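	  /* E.g., in QImode (eq (ashift X 7) 0) depends only on the low bit
	     of X, so it becomes (eq (and X 1) 0); NE is analogous.  */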
10372	  break;
10373
10374	case ASHIFTRT:
10375	  /* If this is an equality comparison with zero, we can do this
10376	     as a logical shift, which might be much simpler.  */
10377	  if (equality_comparison_p && const_op == 0
10378	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10379	    {
10380	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10381					  XEXP (op0, 0),
10382					  INTVAL (XEXP (op0, 1)));
10383	      continue;
10384	    }
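	  /* E.g., (eq (ashiftrt X 3) 0) holds exactly when all bits of X
	     above the low three are zero, which is also what
	     (eq (lshiftrt X 3) 0) tests, and the logical form is often
	     easier to simplify further.  */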
10385
10386	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10387	     do the comparison in a narrower mode.  */
10388	  if (! unsigned_comparison_p
10389	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10390	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10391	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10392	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10393					 MODE_INT, 1)) != BLKmode
10394	      && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10395		  || ((unsigned HOST_WIDE_INT) - const_op
10396		      <= GET_MODE_MASK (tmode))))
10397	    {
10398	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10399	      continue;
10400	    }
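	  /* Sketch: in SImode, (ashiftrt (ashift X 24) 24) sign-extends the
	     low byte of X, so a signed comparison of it against a constant
	     representable in QImode can be done directly on the QImode low
	     part of X.  */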
10401
10402	  /* ... fall through ...  */
10403	case LSHIFTRT:
10404	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10405	     the low order N bits of FOO are known to be zero, we can do this
10406	     by comparing FOO with C shifted left N bits so long as no
10407	     overflow occurs.  */
10408	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10409	      && INTVAL (XEXP (op0, 1)) >= 0
10410	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10411	      && mode_width <= HOST_BITS_PER_WIDE_INT
10412	      && (nonzero_bits (XEXP (op0, 0), mode)
10413		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10414	      && (const_op == 0
10415		  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10416		      < mode_width)))
10417	    {
10418	      const_op <<= INTVAL (XEXP (op0, 1));
10419	      op1 = GEN_INT (const_op);
10420	      op0 = XEXP (op0, 0);
10421	      continue;
10422	    }
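	  /* Worked example: (compare (lshiftrt FOO 2) (const_int 5)) with
	     the low 2 bits of FOO known zero becomes
	     (compare FOO (const_int 20)); 5 << 2 == 20 stays within the
	     mode, so no overflow occurs.  */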
10423
10424	  /* If we are using this shift to extract just the sign bit, we
10425	     can replace this with an LT or GE comparison.  */
10426	  if (const_op == 0
10427	      && (equality_comparison_p || sign_bit_comparison_p)
10428	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10429	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10430	    {
10431	      op0 = XEXP (op0, 0);
10432	      code = (code == NE || code == GT ? LT : GE);
10433	      continue;
10434	    }
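	  /* E.g., in SImode (ne (lshiftrt X 31) 0) extracts just the sign
	     bit of X and so becomes (lt X 0); EQ likewise becomes GE.  */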
10435	  break;
10436
10437	default:
10438	  break;
10439	}
10440
10441      break;
10442    }
10443
10444  /* Now make any compound operations involved in this comparison.  Then,
10445     check for an outermost SUBREG on OP0 that is not doing anything or is
10446     paradoxical.  The latter case can only occur when it is known that the
10447     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
10448     We can never remove a SUBREG for a non-equality comparison because the
10449     sign bit is in a different place in the underlying object.  */
10450
10451  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10452  op1 = make_compound_operation (op1, SET);
10453
10454  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10455      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10456      && (code == NE || code == EQ)
10457      && ((GET_MODE_SIZE (GET_MODE (op0))
10458	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10459    {
10460      op0 = SUBREG_REG (op0);
10461      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10462    }
10463
10464  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10465	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10466	   && (code == NE || code == EQ)
10467	   && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10468	       <= HOST_BITS_PER_WIDE_INT)
10469	   && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10470	       & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10471	   && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10472					      op1),
10473	       (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10474		& ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10475    op0 = SUBREG_REG (op0), op1 = tem;
10476
10477  /* We now do the opposite procedure: Some machines don't have compare
10478     insns in all modes.  If OP0's mode is an integer mode smaller than a
10479     word and we can't do a compare in that mode, see if there is a larger
10480     mode for which we can do the compare.  There are a number of cases in
10481     which we can use the wider mode.  */
10482
10483  mode = GET_MODE (op0);
10484  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10485      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10486      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10487    for (tmode = GET_MODE_WIDER_MODE (mode);
10488	 (tmode != VOIDmode
10489	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10490	 tmode = GET_MODE_WIDER_MODE (tmode))
10491      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10492	{
10493	  /* If the only nonzero bits in OP0 and OP1 are those in the
10494	     narrower mode and this is an equality or unsigned comparison,
10495	     we can use the wider mode.  Similarly for sign-extended
10496	     values, in which case it is true for all comparisons.  */
10497	  if (((code == EQ || code == NE
10498		|| code == GEU || code == GTU || code == LEU || code == LTU)
10499	       && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10500	       && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
10501	      || ((num_sign_bit_copies (op0, tmode)
10502		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10503		  && (num_sign_bit_copies (op1, tmode)
10504		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10505	    {
10506	      op0 = gen_lowpart_for_combine (tmode, op0);
10507	      op1 = gen_lowpart_for_combine (tmode, op1);
10508	      break;
10509	    }
10510
10511	  /* If this is a test for negative, we can make an explicit
10512	     test of the sign bit.  */
10513
10514	  if (op1 == const0_rtx && (code == LT || code == GE)
10515	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10516	    {
10517	      op0 = gen_binary (AND, tmode,
10518				gen_lowpart_for_combine (tmode, op0),
10519				GEN_INT ((HOST_WIDE_INT) 1
10520					 << (GET_MODE_BITSIZE (mode) - 1)));
10521	      code = (code == LT) ? NE : EQ;
10522	      break;
10523	    }
10524	}
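  /* Illustrative sketch: on a machine with no QImode compare pattern,
     (eq (reg:QI X) (const_int 5)) can be done as an SImode compare once
     both operands are known either to have no bits outside QImode or to
     be sign-extended from QImode.  */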
10525
10526#ifdef CANONICALIZE_COMPARISON
10527  /* If this machine only supports a subset of valid comparisons, see if we
10528     can convert an unsupported one into a supported one.  */
10529  CANONICALIZE_COMPARISON (code, op0, op1);
10530#endif
10531
10532  *pop0 = op0;
10533  *pop1 = op1;
10534
10535  return code;
10536}
10537
10538/* Return 1 if we know that X, a comparison operation, is not operating
10539   on a floating-point value or is EQ or NE, meaning that we can safely
10540   reverse it.  */
10541
10542static int
10543reversible_comparison_p (x)
10544     rtx x;
10545{
10546  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
10547      || flag_fast_math
10548      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10549    return 1;
10550
10551  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10552    {
10553    case MODE_INT:
10554    case MODE_PARTIAL_INT:
10555    case MODE_COMPLEX_INT:
10556      return 1;
10557
10558    case MODE_CC:
10559      /* If the mode of the condition codes tells us that this is safe,
10560	 we need look no further.  */
10561      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10562	return 1;
10563
10564      /* Otherwise try and find where the condition codes were last set and
10565	 use that.  */
10566      x = get_last_value (XEXP (x, 0));
10567      return (x && GET_CODE (x) == COMPARE
10568	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10569
10570    default:
10571      return 0;
10572    }
10573}
10574
10575/* Utility function for following routine.  Called when X is part of a value
10576   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
10577   for each register mentioned.  Similar to mention_regs in cse.c  */
10578
10579static void
10580update_table_tick (x)
10581     rtx x;
10582{
10583  register enum rtx_code code = GET_CODE (x);
10584  register char *fmt = GET_RTX_FORMAT (code);
10585  register int i;
10586
10587  if (code == REG)
10588    {
10589      int regno = REGNO (x);
10590      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10591			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10592
10593      for (i = regno; i < endregno; i++)
10594	reg_last_set_table_tick[i] = label_tick;
10595
10596      return;
10597    }
10598
10599  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10600    /* Note that we can't have an "E" in values stored; see
10601       get_last_value_validate.  */
10602    if (fmt[i] == 'e')
10603      update_table_tick (XEXP (x, i));
10604}
10605
10606/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
10607   are saying that the register is clobbered and we no longer know its
10608   value.  If INSN is zero, don't update reg_last_set; this is only permitted
10609   with VALUE also zero and is used to invalidate the register.  */
10610
10611static void
10612record_value_for_reg (reg, insn, value)
10613     rtx reg;
10614     rtx insn;
10615     rtx value;
10616{
10617  int regno = REGNO (reg);
10618  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10619			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10620  int i;
10621
10622  /* If VALUE contains REG and we have a previous value for REG, substitute
10623     the previous value.  */
10624  if (value && insn && reg_overlap_mentioned_p (reg, value))
10625    {
10626      rtx tem;
10627
10628      /* Set things up so get_last_value is allowed to see anything set up to
10629	 our insn.  */
10630      subst_low_cuid = INSN_CUID (insn);
10631      tem = get_last_value (reg);
10632
10633      if (tem)
10634	value = replace_rtx (copy_rtx (value), reg, tem);
10635    }
10636
10637  /* For each register modified, show we don't know its value, that
10638     we don't know about its bitwise content, that its value has been
10639     updated, and that we don't know the location of the death of the
10640     register.  */
10641  for (i = regno; i < endregno; i ++)
10642    {
10643      if (insn)
10644	reg_last_set[i] = insn;
10645      reg_last_set_value[i] = 0;
10646      reg_last_set_mode[i] = 0;
10647      reg_last_set_nonzero_bits[i] = 0;
10648      reg_last_set_sign_bit_copies[i] = 0;
10649      reg_last_death[i] = 0;
10650    }
10651
10652  /* Mark registers that are being referenced in this value.  */
10653  if (value)
10654    update_table_tick (value);
10655
10656  /* Now update the status of each register being set.
10657     If someone is using this register in this block, set this register
10658     to invalid since we will get confused between the two lives in this
10659     basic block.  This makes using this register always invalid.  In cse, we
10660     scan the table to invalidate all entries using this register, but this
10661     is too much work for us.  */
10662
10663  for (i = regno; i < endregno; i++)
10664    {
10665      reg_last_set_label[i] = label_tick;
10666      if (value && reg_last_set_table_tick[i] == label_tick)
10667	reg_last_set_invalid[i] = 1;
10668      else
10669	reg_last_set_invalid[i] = 0;
10670    }
10671
10672  /* The value being assigned might refer to X (like in "x++;").  In that
10673     case, we must replace it with (clobber (const_int 0)) to prevent
10674     infinite loops.  */
10675  if (value && ! get_last_value_validate (&value, insn,
10676					  reg_last_set_label[regno], 0))
10677    {
10678      value = copy_rtx (value);
10679      if (! get_last_value_validate (&value, insn,
10680				     reg_last_set_label[regno], 1))
10681	value = 0;
10682    }
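  /* Illustration: after "x++" the recorded value is
     (plus (reg X) (const_int 1)), which mentions the very register being
     set; validation fails, so the self-reference is replaced by
     (clobber (const_int 0)) or, failing that, the value is dropped.  */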
10683
10684  /* For the main register being modified, update the value, the mode, the
10685     nonzero bits, and the number of sign bit copies.  */
10686
10687  reg_last_set_value[regno] = value;
10688
10689  if (value)
10690    {
10691      subst_low_cuid = INSN_CUID (insn);
10692      reg_last_set_mode[regno] = GET_MODE (reg);
10693      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
10694      reg_last_set_sign_bit_copies[regno]
10695	= num_sign_bit_copies (value, GET_MODE (reg));
10696    }
10697}
10698
10699/* Used for communication between the following two routines.  */
10700static rtx record_dead_insn;
10701
10702/* Called via note_stores from record_dead_and_set_regs to handle one
10703   SET or CLOBBER in an insn.  */
10704
10705static void
10706record_dead_and_set_regs_1 (dest, setter)
10707     rtx dest, setter;
10708{
10709  if (GET_CODE (dest) == SUBREG)
10710    dest = SUBREG_REG (dest);
10711
10712  if (GET_CODE (dest) == REG)
10713    {
10714      /* If we are setting the whole register, we know its value.  Otherwise
10715	 show that we don't know the value.  We can handle SUBREG in
10716	 some cases.  */
10717      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10718	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10719      else if (GET_CODE (setter) == SET
10720	       && GET_CODE (SET_DEST (setter)) == SUBREG
10721	       && SUBREG_REG (SET_DEST (setter)) == dest
10722	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
10723	       && subreg_lowpart_p (SET_DEST (setter)))
10724	record_value_for_reg (dest, record_dead_insn,
10725			      gen_lowpart_for_combine (GET_MODE (dest),
10726						       SET_SRC (setter)));
10727      else
10728	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
10729    }
10730  else if (GET_CODE (dest) == MEM
10731	   /* Ignore pushes, they clobber nothing.  */
10732	   && ! push_operand (dest, GET_MODE (dest)))
10733    mem_last_set = INSN_CUID (record_dead_insn);
10734}
10735
10736/* Update the records of when each REG was most recently set or killed
10737   for the things done by INSN.  This is the last thing done in processing
10738   INSN in the combiner loop.
10739
10740   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10741   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10742   and also the similar information mem_last_set (which insn most recently
10743   modified memory) and last_call_cuid (which insn was the most recent
10744   subroutine call).  */
10745
10746static void
10747record_dead_and_set_regs (insn)
10748     rtx insn;
10749{
10750  register rtx link;
10751  int i;
10752
10753  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10754    {
10755      if (REG_NOTE_KIND (link) == REG_DEAD
10756	  && GET_CODE (XEXP (link, 0)) == REG)
10757	{
10758	  int regno = REGNO (XEXP (link, 0));
10759	  int endregno
10760	    = regno + (regno < FIRST_PSEUDO_REGISTER
10761		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10762		       : 1);
10763
10764	  for (i = regno; i < endregno; i++)
10765	    reg_last_death[i] = insn;
10766	}
10767      else if (REG_NOTE_KIND (link) == REG_INC)
10768	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
10769    }
10770
10771  if (GET_CODE (insn) == CALL_INSN)
10772    {
10773      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10774	if (call_used_regs[i])
10775	  {
10776	    reg_last_set_value[i] = 0;
10777	    reg_last_set_mode[i] = 0;
10778	    reg_last_set_nonzero_bits[i] = 0;
10779	    reg_last_set_sign_bit_copies[i] = 0;
10780	    reg_last_death[i] = 0;
10781	  }
10782
10783      last_call_cuid = mem_last_set = INSN_CUID (insn);
10784    }
10785
10786  record_dead_insn = insn;
10787  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10788}
10789
10790/* Utility routine for the following function.  Verify that all the registers
10791   mentioned in *LOC are valid when *LOC was part of a value set when
10792   label_tick == TICK.  Return 0 if some are not.
10793
10794   If REPLACE is non-zero, replace the invalid reference with
10795   (clobber (const_int 0)) and return 1.  This replacement is useful because
10796   we often can get useful information about the form of a value (e.g., if
10797   it was produced by a shift that always produces -1 or 0) even though
10798   we don't know exactly what registers it was produced from.  */
10799
10800static int
10801get_last_value_validate (loc, insn, tick, replace)
10802     rtx *loc;
10803     rtx insn;
10804     int tick;
10805     int replace;
10806{
10807  rtx x = *loc;
10808  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
10809  int len = GET_RTX_LENGTH (GET_CODE (x));
10810  int i;
10811
10812  if (GET_CODE (x) == REG)
10813    {
10814      int regno = REGNO (x);
10815      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10816			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10817      int j;
10818
10819      for (j = regno; j < endregno; j++)
10820	if (reg_last_set_invalid[j]
10821	    /* If this is a pseudo-register that was only set once, it is
10822	       always valid.  */
10823	    || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
10824		&& reg_last_set_label[j] > tick))
10825	  {
10826	    if (replace)
10827	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10828	    return replace;
10829	  }
10830
10831      return 1;
10832    }
10833  /* If this is a memory reference, make sure that there were
10834     no stores after it that might have clobbered the value.  We don't
10835     have alias info, so we assume any store invalidates it.  */
10836  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
10837	   && INSN_CUID (insn) <= mem_last_set)
10838    {
10839      if (replace)
10840	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10841      return replace;
10842    }
10843
10844  for (i = 0; i < len; i++)
10845    if ((fmt[i] == 'e'
10846	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
10847	/* Don't bother with these.  They shouldn't occur anyway.  */
10848	|| fmt[i] == 'E')
10849      return 0;
10850
10851  /* If we haven't found a reason for it to be invalid, it is valid.  */
10852  return 1;
10853}
10854
10855/* Get the last value assigned to X, if known.  Some registers
10856   in the value may be replaced with (clobber (const_int 0)) if their value
10857	   is no longer known reliably.  */
10858
10859static rtx
10860get_last_value (x)
10861     rtx x;
10862{
10863  int regno;
10864  rtx value;
10865
10866  /* If this is a non-paradoxical SUBREG, get the value of its operand and
10867     then convert it to the desired mode.  If this is a paradoxical SUBREG,
10868     we cannot predict what values the "extra" bits might have.  */
10869  if (GET_CODE (x) == SUBREG
10870      && subreg_lowpart_p (x)
10871      && (GET_MODE_SIZE (GET_MODE (x))
10872	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10873      && (value = get_last_value (SUBREG_REG (x))) != 0)
10874    return gen_lowpart_for_combine (GET_MODE (x), value);
10875
10876  if (GET_CODE (x) != REG)
10877    return 0;
10878
10879  regno = REGNO (x);
10880  value = reg_last_set_value[regno];
10881
10882  /* If we don't have a value or if it isn't for this basic block,
10883     return 0.  */
10884
10885  if (value == 0
10886      || (REG_N_SETS (regno) != 1
10887	  && reg_last_set_label[regno] != label_tick))
10888    return 0;
10889
10890  /* If the value was set in a later insn than the ones we are processing,
10891     we can't use it even if the register was only set once.  */
10892  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10893    return 0;
10894
10895  /* If the value has all its registers valid, return it.  */
10896  if (get_last_value_validate (&value, reg_last_set[regno],
10897			       reg_last_set_label[regno], 0))
10898    return value;
10899
10900  /* Otherwise, make a copy and replace any invalid register with
10901     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
10902
10903  value = copy_rtx (value);
10904  if (get_last_value_validate (&value, reg_last_set[regno],
10905			       reg_last_set_label[regno], 1))
10906    return value;
10907
10908  return 0;
10909}
10910
10911/* Return nonzero if expression X refers to a REG or to memory
10912   that is set in an instruction more recent than FROM_CUID.  */
10913
10914static int
10915use_crosses_set_p (x, from_cuid)
10916     register rtx x;
10917     int from_cuid;
10918{
10919  register char *fmt;
10920  register int i;
10921  register enum rtx_code code = GET_CODE (x);
10922
10923  if (code == REG)
10924    {
10925      register int regno = REGNO (x);
10926      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10927			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10928
10929#ifdef PUSH_ROUNDING
10930      /* Don't allow uses of the stack pointer to be moved,
10931	 because we don't know whether the move crosses a push insn.  */
10932      if (regno == STACK_POINTER_REGNUM)
10933	return 1;
10934#endif
10935      for (; regno < endreg; regno++)
10936	if (reg_last_set[regno]
10937	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
10938	  return 1;
10939      return 0;
10940    }
10941
10942  if (code == MEM && mem_last_set > from_cuid)
10943    return 1;
10944
10945  fmt = GET_RTX_FORMAT (code);
10946
10947  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10948    {
10949      if (fmt[i] == 'E')
10950	{
10951	  register int j;
10952	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10953	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10954	      return 1;
10955	}
10956      else if (fmt[i] == 'e'
10957	       && use_crosses_set_p (XEXP (x, i), from_cuid))
10958	return 1;
10959    }
10960  return 0;
10961}
10962
10963/* Define three variables used for communication between the following
10964   routines.  */
10965
10966static int reg_dead_regno, reg_dead_endregno;
10967static int reg_dead_flag;
10968
10969/* Function called via note_stores from reg_dead_at_p.
10970
10971   If DEST is within [reg_dead_regno, reg_dead_endregno), set
10972	   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
10973
10974static void
10975reg_dead_at_p_1 (dest, x)
10976     rtx dest;
10977     rtx x;
10978{
10979  int regno, endregno;
10980
10981  if (GET_CODE (dest) != REG)
10982    return;
10983
10984  regno = REGNO (dest);
10985  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10986		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10987
10988  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10989    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10990}
10991
10992/* Return non-zero if REG is known to be dead at INSN.
10993
10994   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
10995   referencing REG, it is dead.  If we hit a SET referencing REG, it is
10996   live.  Otherwise, see if it is live or dead at the start of the basic
10997   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
10998   must be assumed to be always live.  */
10999
11000static int
11001reg_dead_at_p (reg, insn)
11002     rtx reg;
11003     rtx insn;
11004{
11005  int block, i;
11006
11007  /* Set variables for reg_dead_at_p_1.  */
11008  reg_dead_regno = REGNO (reg);
11009  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11010					? HARD_REGNO_NREGS (reg_dead_regno,
11011							    GET_MODE (reg))
11012					: 1);
11013
11014  reg_dead_flag = 0;
11015
11016  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
11017  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11018    {
11019      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11020	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11021	  return 0;
11022    }
11023
11024  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11025     beginning of function.  */
11026  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11027       insn = prev_nonnote_insn (insn))
11028    {
11029      note_stores (PATTERN (insn), reg_dead_at_p_1);
11030      if (reg_dead_flag)
11031	return reg_dead_flag == 1 ? 1 : 0;
11032
11033      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11034	return 1;
11035    }
11036
11037  /* Get the basic block number that we were in.  */
11038  if (insn == 0)
11039    block = 0;
11040  else
11041    {
11042      for (block = 0; block < n_basic_blocks; block++)
11043	if (insn == BLOCK_HEAD (block))
11044	  break;
11045
11046      if (block == n_basic_blocks)
11047	return 0;
11048    }
11049
11050  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11051    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11052      return 0;
11053
11054  return 1;
11055}
11056
11057/* Note hard registers in X that are used.  This code is similar to
11058   that in flow.c, but much simpler since we don't care about pseudos.  */
11059
11060static void
11061mark_used_regs_combine (x)
11062     rtx x;
11063{
11064  register RTX_CODE code = GET_CODE (x);
11065  register int regno;
11066  int i;
11067
11068  switch (code)
11069    {
11070    case LABEL_REF:
11071    case SYMBOL_REF:
11072    case CONST_INT:
11073    case CONST:
11074    case CONST_DOUBLE:
11075    case PC:
11076    case ADDR_VEC:
11077    case ADDR_DIFF_VEC:
11078    case ASM_INPUT:
11079#ifdef HAVE_cc0
11080    /* CC0 must die in the insn after it is set, so we don't need to take
11081       special note of it here.  */
11082    case CC0:
11083#endif
11084      return;
11085
11086    case CLOBBER:
11087      /* If we are clobbering a MEM, mark any hard registers inside the
11088	 address as used.  */
11089      if (GET_CODE (XEXP (x, 0)) == MEM)
11090	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11091      return;
11092
11093    case REG:
11094      regno = REGNO (x);
11095      /* A hard reg in a wide mode may really be multiple registers.
11096	 If so, mark all of them just like the first.  */
11097      if (regno < FIRST_PSEUDO_REGISTER)
11098	{
11099	  /* None of this applies to the stack, frame or arg pointers.  */
11100	  if (regno == STACK_POINTER_REGNUM
11101#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11102	      || regno == HARD_FRAME_POINTER_REGNUM
11103#endif
11104#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11105	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11106#endif
11107	      || regno == FRAME_POINTER_REGNUM)
11108	    return;
11109
11110	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
11111	  while (i-- > 0)
11112	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
11113	}
11114      return;
11115
11116    case SET:
11117      {
11118	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11119	   the address.  */
11120	register rtx testreg = SET_DEST (x);
11121
11122	while (GET_CODE (testreg) == SUBREG
11123	       || GET_CODE (testreg) == ZERO_EXTRACT
11124	       || GET_CODE (testreg) == SIGN_EXTRACT
11125	       || GET_CODE (testreg) == STRICT_LOW_PART)
11126	  testreg = XEXP (testreg, 0);
11127
11128	if (GET_CODE (testreg) == MEM)
11129	  mark_used_regs_combine (XEXP (testreg, 0));
11130
11131	mark_used_regs_combine (SET_SRC (x));
11132      }
11133      return;
11134
11135    default:
11136      break;
11137    }
11138
11139  /* Recursively scan the operands of this expression.  */
11140
11141  {
11142    register char *fmt = GET_RTX_FORMAT (code);
11143
11144    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11145      {
11146        if (fmt[i] == 'e')
11147	  mark_used_regs_combine (XEXP (x, i));
11148        else if (fmt[i] == 'E')
11149          {
11150            register int j;
11151
11152            for (j = 0; j < XVECLEN (x, i); j++)
11153              mark_used_regs_combine (XVECEXP (x, i, j));
11154          }
11155      }
11156  }
11157}
11158
11159
11160/* Remove register number REGNO from the dead registers list of INSN.
11161
11162   Return the note used to record the death, if there was one.  */
11163
11164rtx
11165remove_death (regno, insn)
11166     int regno;
11167     rtx insn;
11168{
11169  register rtx note = find_regno_note (insn, REG_DEAD, regno);
11170
11171  if (note)
11172    {
11173      REG_N_DEATHS (regno)--;
11174      remove_note (insn, note);
11175    }
11176
11177  return note;
11178}
11179
11180/* For each register (hardware or pseudo) used within expression X, if its
11181   death is in an instruction with cuid between FROM_CUID (inclusive) and
11182   TO_INSN (exclusive), put a REG_DEAD note for that register in the
11183   list headed by PNOTES.
11184
11185   That said, don't move registers killed by maybe_kill_insn.
11186
11187   This is done when X is being merged by combination into TO_INSN.  These
11188   notes will then be distributed as needed.  */
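/* An illustrative sketch: if X uses (reg 65) (a hypothetical pseudo)
   whose death note lies on an insn between FROM_CUID and TO_INSN, that
   REG_DEAD note is removed from its old insn and queued on PNOTES so
   that distribute_notes can place it properly afterward.  */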
11189
11190static void
11191move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11192     rtx x;
11193     rtx maybe_kill_insn;
11194     int from_cuid;
11195     rtx to_insn;
11196     rtx *pnotes;
11197{
11198  register char *fmt;
11199  register int len, i;
11200  register enum rtx_code code = GET_CODE (x);
11201
11202  if (code == REG)
11203    {
11204      register int regno = REGNO (x);
11205      register rtx where_dead = reg_last_death[regno];
11206      register rtx before_dead, after_dead;
11207
11208	      /* Don't move the register if it gets killed in between from and to.  */
11209      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11210	  && !reg_referenced_p (x, maybe_kill_insn))
11211	return;
11212
11213      /* WHERE_DEAD could be a USE insn made by combine, so first we
11214	 make sure that we have insns with valid INSN_CUID values.  */
11215      before_dead = where_dead;
11216      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11217	before_dead = PREV_INSN (before_dead);
11218      after_dead = where_dead;
11219      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11220	after_dead = NEXT_INSN (after_dead);
11221
11222      if (before_dead && after_dead
11223	  && INSN_CUID (before_dead) >= from_cuid
11224	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11225	      || (where_dead != after_dead
11226		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11227	{
11228	  rtx note = remove_death (regno, where_dead);
11229
11230	  /* It is possible for the call above to return 0.  This can occur
11231	     when reg_last_death points to I2 or I1 that we combined with.
11232	     In that case make a new note.
11233
11234	     We must also check for the case where X is a hard register
11235	     and NOTE is a death note for a range of hard registers
11236	     including X.  In that case, we must put REG_DEAD notes for
11237	     the remaining registers in place of NOTE.  */
11238
11239	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11240	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11241		  > GET_MODE_SIZE (GET_MODE (x))))
11242	    {
11243	      int deadregno = REGNO (XEXP (note, 0));
11244	      int deadend
11245		= (deadregno + HARD_REGNO_NREGS (deadregno,
11246						 GET_MODE (XEXP (note, 0))));
11247	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11248	      int i;
11249
11250	      for (i = deadregno; i < deadend; i++)
11251		if (i < regno || i >= ourend)
11252		  REG_NOTES (where_dead)
11253		    = gen_rtx_EXPR_LIST (REG_DEAD,
11254					 gen_rtx_REG (reg_raw_mode[i], i),
11255					 REG_NOTES (where_dead));
11256	    }
11257	  /* If we didn't find any note, or if we found a REG_DEAD note that
11258	     covers only part of the given reg, and we have a multi-reg hard
11259	     register, then to be safe we must check for REG_DEAD notes
11260	     for each register other than the first.  They could have
11261	     their own REG_DEAD notes lying around.  */
11262	  else if ((note == 0
11263		    || (note != 0
11264			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11265			    < GET_MODE_SIZE (GET_MODE (x)))))
11266		   && regno < FIRST_PSEUDO_REGISTER
11267		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11268	    {
11269	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11270	      int i, offset;
11271	      rtx oldnotes = 0;
11272
11273	      if (note)
11274		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11275	      else
11276		offset = 1;
11277
11278	      for (i = regno + offset; i < ourend; i++)
11279		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11280			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11281	    }
11282
11283	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11284	    {
11285	      XEXP (note, 1) = *pnotes;
11286	      *pnotes = note;
11287	    }
11288	  else
11289	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11290
11291	  REG_N_DEATHS (regno)++;
11292	}
11293
11294      return;
11295    }
11296
11297  else if (GET_CODE (x) == SET)
11298    {
11299      rtx dest = SET_DEST (x);
11300
11301      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11302
11303      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11304	 that accesses one word of a multi-word item, some
11305	 piece of every register in the expression is used by
11306	 this insn, so remove any old death.  */
11307
11308      if (GET_CODE (dest) == ZERO_EXTRACT
11309	  || GET_CODE (dest) == STRICT_LOW_PART
11310	  || (GET_CODE (dest) == SUBREG
11311	      && (((GET_MODE_SIZE (GET_MODE (dest))
11312		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11313		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11314		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11315	{
11316	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11317	  return;
11318	}
11319
11320      /* If this is some other SUBREG, we know it replaces the entire
11321	 value, so use that as the destination.  */
11322      if (GET_CODE (dest) == SUBREG)
11323	dest = SUBREG_REG (dest);
11324
11325      /* If this is a MEM, adjust deaths of anything used in the address.
11326	 For a REG (the only other possibility), the entire value is
11327	 being replaced so the old value is not used in this insn.  */
11328
11329      if (GET_CODE (dest) == MEM)
11330	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11331		     to_insn, pnotes);
11332      return;
11333    }
11334
11335  else if (GET_CODE (x) == CLOBBER)
11336    return;
11337
11338  len = GET_RTX_LENGTH (code);
11339  fmt = GET_RTX_FORMAT (code);
11340
11341  for (i = 0; i < len; i++)
11342    {
11343      if (fmt[i] == 'E')
11344	{
11345	  register int j;
11346	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11347	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11348			 to_insn, pnotes);
11349	}
11350      else if (fmt[i] == 'e')
11351	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11352    }
11353}
11354
11355/* Return 1 if X is the target of a bit-field assignment in BODY, the
11356   pattern of an insn.  X must be a REG.  */
11357
11358static int
11359reg_bitfield_target_p (x, body)
11360     rtx x;
11361     rtx body;
11362{
11363  int i;
11364
11365  if (GET_CODE (body) == SET)
11366    {
11367      rtx dest = SET_DEST (body);
11368      rtx target;
11369      int regno, tregno, endregno, endtregno;
11370
11371      if (GET_CODE (dest) == ZERO_EXTRACT)
11372	target = XEXP (dest, 0);
11373      else if (GET_CODE (dest) == STRICT_LOW_PART)
11374	target = SUBREG_REG (XEXP (dest, 0));
11375      else
11376	return 0;
11377
11378      if (GET_CODE (target) == SUBREG)
11379	target = SUBREG_REG (target);
11380
11381      if (GET_CODE (target) != REG)
11382	return 0;
11383
11384      tregno = REGNO (target), regno = REGNO (x);
11385      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11386	return target == x;
11387
11388      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11389      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11390
11391      return endregno > tregno && regno < endtregno;
11392    }
11393
11394  else if (GET_CODE (body) == PARALLEL)
11395    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11396      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11397	return 1;
11398
11399  return 0;
11400}
11401
11402/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11403   as appropriate.  I3 and I2 are the insns resulting from the combination
11404   insns including FROM (I2 may be zero).
11405
11406   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11407   not need REG_DEAD notes because they are being substituted for.  This
11408   saves searching in the most common cases.
11409
11410   Each note in the list is either ignored or placed on some insns, depending
11411   on the type of note.  */
11412
11413static void
11414distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11415     rtx notes;
11416     rtx from_insn;
11417     rtx i3, i2;
11418     rtx elim_i2, elim_i1;
11419{
11420  rtx note, next_note;
11421  rtx tem;
11422
11423  for (note = notes; note; note = next_note)
11424    {
11425      rtx place = 0, place2 = 0;
11426
11427      /* If this NOTE references a pseudo register, ensure it references
11428	 the latest copy of that register.  */
11429      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11430	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11431	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11432
11433      next_note = XEXP (note, 1);
11434      switch (REG_NOTE_KIND (note))
11435	{
11436	case REG_BR_PROB:
11437	case REG_EXEC_COUNT:
11438	  /* Doesn't matter much where we put this, as long as it's somewhere.
11439	     It is preferable to keep these notes on branches, which is most
11440	     likely to be i3.  */
11441	  place = i3;
11442	  break;
11443
11444	case REG_EH_REGION:
11445	  /* This note must remain with the call.  It should not be possible
11446	     for both I2 and I3 to be a call.  */
11447	  if (GET_CODE (i3) == CALL_INSN)
11448	    place = i3;
11449	  else if (i2 && GET_CODE (i2) == CALL_INSN)
11450	    place = i2;
11451	  else
11452	    abort ();
11453	  break;
11454
11455	case REG_UNUSED:
11456	  /* Any clobbers for i3 may still exist, and so we must process
11457	     REG_UNUSED notes from that insn.
11458
11459	     Any clobbers from i2 or i1 can only exist if they were added by
11460	     recog_for_combine.  In that case, recog_for_combine created the
11461	     necessary REG_UNUSED notes.  Trying to keep any original
11462	     REG_UNUSED notes from these insns can cause incorrect output
11463	     if it is for the same register as the original i3 dest.
11464	     In that case, we will notice that the register is set in i3,
11465	     and then add a REG_UNUSED note for the destination of i3, which
11466	     is wrong.  However, it is possible to have REG_UNUSED notes from
11467	     i2 or i1 for registers which were both used and clobbered, so
11468	     we keep notes from i2 or i1 if they will turn into REG_DEAD
11469	     notes.  */
11470
11471	  /* If this register is set or clobbered in I3, put the note there
11472	     unless there is one already.  */
11473	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11474	    {
11475	      if (from_insn != i3)
11476		break;
11477
11478	      if (! (GET_CODE (XEXP (note, 0)) == REG
11479		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11480		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11481		place = i3;
11482	    }
11483	  /* Otherwise, if this register is used by I3, then this register
11484	     now dies here, so we must put a REG_DEAD note here unless there
11485	     is one already.  */
11486	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11487		   && ! (GET_CODE (XEXP (note, 0)) == REG
11488			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11489			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11490	    {
11491	      PUT_REG_NOTE_KIND (note, REG_DEAD);
11492	      place = i3;
11493	    }
11494	  break;
11495
11496	case REG_EQUAL:
11497	case REG_EQUIV:
11498	case REG_NONNEG:
11499	case REG_NOALIAS:
11500	  /* These notes say something about results of an insn.  We can
11501	     only support them if they used to be on I3 in which case they
11502	     remain on I3.  Otherwise they are ignored.
11503
11504	     If the note refers to an expression that is not a constant, we
11505	     must also ignore the note since we cannot tell whether the
11506	     equivalence is still true.  It might be possible to do
11507	     slightly better than this (we only have a problem if I2DEST
11508	     or I1DEST is present in the expression), but it doesn't
11509	     seem worth the trouble.  */
11510
11511	  if (from_insn == i3
11512	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
11513	    place = i3;
11514	  break;
11515
11516	case REG_INC:
11517	case REG_NO_CONFLICT:
11518	  /* These notes say something about how a register is used.  They must
11519	     be present on any use of the register in I2 or I3.  */
11520	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11521	    place = i3;
11522
11523	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11524	    {
11525	      if (place)
11526		place2 = i2;
11527	      else
11528		place = i2;
11529	    }
11530	  break;
11531
11532	case REG_LABEL:
11533	  /* This can show up in several ways -- either directly in the
11534	     pattern, or hidden off in the constant pool with (or without?)
11535	     a REG_EQUAL note.  */
11536	  /* ??? Ignore the without-reg_equal-note problem for now.  */
11537	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
11538	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
11539		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11540		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
11541	    place = i3;
11542
11543	  if (i2
11544	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
11545	          || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
11546		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11547		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
11548	    {
11549	      if (place)
11550		place2 = i2;
11551	      else
11552		place = i2;
11553	    }
11554	  break;
11555
11556	case REG_WAS_0:
11557	  /* It is too much trouble to try to see if this note is still
11558	     correct in all situations.  It is better to simply delete it.  */
11559	  break;
11560
11561	case REG_RETVAL:
11562	  /* If the insn previously containing this note still exists,
11563	     put it back where it was.  Otherwise move it to the previous
11564	     insn.  Adjust the corresponding REG_LIBCALL note.  */
11565	  if (GET_CODE (from_insn) != NOTE)
11566	    place = from_insn;
11567	  else
11568	    {
11569	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
11570	      place = prev_real_insn (from_insn);
11571	      if (tem && place)
11572		XEXP (tem, 0) = place;
11573	    }
11574	  break;
11575
11576	case REG_LIBCALL:
11577	  /* This is handled similarly to REG_RETVAL.  */
11578	  if (GET_CODE (from_insn) != NOTE)
11579	    place = from_insn;
11580	  else
11581	    {
11582	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
11583	      place = next_real_insn (from_insn);
11584	      if (tem && place)
11585		XEXP (tem, 0) = place;
11586	    }
11587	  break;
11588
11589	case REG_DEAD:
11590	  /* If the register is used as an input in I3, it dies there.
11591	     Similarly for I2, if it is non-zero and adjacent to I3.
11592
11593	     If the register is not used as an input in either I3 or I2
11594	     and it is not one of the registers we were supposed to eliminate,
11595	     there are two possibilities.  We might have a non-adjacent I2
11596	     or we might have somehow eliminated an additional register
11597	     from a computation.  For example, we might have had A & B where
11598	     we discover that B will always be zero.  In this case we will
11599	     eliminate the reference to A.
11600
11601	     In both cases, we must search to see if we can find a previous
11602	     use of A and put the death note there.  */
11603
11604	  if (from_insn
11605	      && GET_CODE (from_insn) == CALL_INSN
11606              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11607	    place = from_insn;
11608	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
11609	    place = i3;
11610	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
11611		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11612	    place = i2;
11613
11614	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11615	    break;
11616
11617	  /* If the register is used in both I2 and I3 and it dies in I3,
11618	     we might have added another reference to it.  If reg_n_refs
11619	     was 2, bump it to 3.  This has to be correct since the
11620	     register must have been set somewhere.  This is done because
11621	     local-alloc.c treats 2 references as a special case.  */
11623
11624	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
11625	      && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
11626	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11627	    REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
11628
11629	  if (place == 0)
11630	    {
11631	      for (tem = prev_nonnote_insn (i3);
11632		   place == 0 && tem
11633		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
11634		   tem = prev_nonnote_insn (tem))
11635		{
11636		  /* If the register is being set at TEM, see if that is all
11637		     TEM is doing.  If so, delete TEM.  Otherwise, make this
11638		     into a REG_UNUSED note instead.  */
11639		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11640		    {
11641		      rtx set = single_set (tem);
11642		      rtx inner_dest = 0;
11643#ifdef HAVE_cc0
11644		      rtx cc0_setter = NULL_RTX;
11645#endif
11646
11647		      if (set != 0)
11648			for (inner_dest = SET_DEST (set);
11649			     GET_CODE (inner_dest) == STRICT_LOW_PART
11650			     || GET_CODE (inner_dest) == SUBREG
11651			     || GET_CODE (inner_dest) == ZERO_EXTRACT;
11652			     inner_dest = XEXP (inner_dest, 0))
11653			  ;
11654
11655		      /* Verify that it was the set, and not a clobber that
11656			 modified the register.
11657
11658			 CC0 targets must be careful to maintain setter/user
11659			 pairs.  If we cannot delete the setter due to side
11660			 effects, mark the user with an UNUSED note instead
11661			 of deleting it.  */
11662
11663		      if (set != 0 && ! side_effects_p (SET_SRC (set))
11664			  && rtx_equal_p (XEXP (note, 0), inner_dest)
11665#ifdef HAVE_cc0
11666			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
11667			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
11668				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
11669#endif
11670			  )
11671			{
11672			  /* Move the notes and links of TEM elsewhere.
11673			     This might delete other dead insns recursively.
11674			     First set the pattern to something that won't use
11675			     any register.  */
11676
11677			  PATTERN (tem) = pc_rtx;
11678
11679			  distribute_notes (REG_NOTES (tem), tem, tem,
11680					    NULL_RTX, NULL_RTX, NULL_RTX);
11681			  distribute_links (LOG_LINKS (tem));
11682
11683			  PUT_CODE (tem, NOTE);
11684			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11685			  NOTE_SOURCE_FILE (tem) = 0;
11686
11687#ifdef HAVE_cc0
11688			  /* Delete the setter too.  */
11689			  if (cc0_setter)
11690			    {
11691			      PATTERN (cc0_setter) = pc_rtx;
11692
11693			      distribute_notes (REG_NOTES (cc0_setter),
11694						cc0_setter, cc0_setter,
11695						NULL_RTX, NULL_RTX, NULL_RTX);
11696			      distribute_links (LOG_LINKS (cc0_setter));
11697
11698			      PUT_CODE (cc0_setter, NOTE);
11699			      NOTE_LINE_NUMBER (cc0_setter) = NOTE_INSN_DELETED;
11700			      NOTE_SOURCE_FILE (cc0_setter) = 0;
11701			    }
11702#endif
11703			}
11704		      /* If the register is both set and used here, put the
11705			 REG_DEAD note here, but place a REG_UNUSED note
11706			 here too unless there already is one.  */
11707		      else if (reg_referenced_p (XEXP (note, 0),
11708						 PATTERN (tem)))
11709			{
11710			  place = tem;
11711
11712			  if (! find_regno_note (tem, REG_UNUSED,
11713						 REGNO (XEXP (note, 0))))
11714			    REG_NOTES (tem)
11715			      = gen_rtx_EXPR_LIST (REG_UNUSED,
11716						   XEXP (note, 0),
11717						   REG_NOTES (tem));
11718			}
11719		      else
11720			{
11721			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
11722
11723			  /*  If there isn't already a REG_UNUSED note, put one
11724			      here.  */
11725			  if (! find_regno_note (tem, REG_UNUSED,
11726						 REGNO (XEXP (note, 0))))
11727			    place = tem;
11728			  break;
11729		      }
11730		  }
11731		else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11732			 || (GET_CODE (tem) == CALL_INSN
11733			     && find_reg_fusage (tem, USE, XEXP (note, 0))))
11734		  {
11735		    place = tem;
11736
11737		    /* If we are doing a 3->2 combination, and we have a
11738		       register which formerly died in i3 and was not used
11739		       by i2, which now no longer dies in i3 and is used in
11740		       i2 but does not die in i2, and place is between i2
11741		       and i3, then we may need to move a link from place to
11742		       i2.  */
11743		    if (i2 && INSN_UID (place) <= max_uid_cuid
11744			&& INSN_CUID (place) > INSN_CUID (i2)
11745			&& from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11746			&& reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11747		      {
11748			rtx links = LOG_LINKS (place);
11749			LOG_LINKS (place) = 0;
11750			distribute_links (links);
11751		      }
11752		    break;
11753		  }
11754		}
11755
11756	      /* If we haven't found an insn for the death note and it
11757		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
11758		 insert a USE insn for the register at that label and
11759		 put the death note there.  This prevents problems with
11760		 call-state tracking in caller-save.c.  */
11761	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
11762		{
11763		  place
11764		    = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
11765				       tem);
11766
11767		  /* If this insn was emitted between blocks, then update
11768		     BLOCK_HEAD of the current block to include it.  */
11769		  if (BLOCK_END (this_basic_block - 1) == tem)
11770		    BLOCK_HEAD (this_basic_block) = place;
11771		}
11772	    }
11773
11774	  /* If the register is set or already dead at PLACE, we needn't do
11775	     anything with this note if it is still a REG_DEAD note.
11776	     We check here if it is set at all, not if it is totally replaced,
11777	     which is what `dead_or_set_p' checks, so also check for it being
11778	     set partially.  */
11779
11781	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
11782	    {
11783	      int regno = REGNO (XEXP (note, 0));
11784
11785	      if (dead_or_set_p (place, XEXP (note, 0))
11786		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11787		{
11788		  /* Unless the register previously died in PLACE, clear
11789		     reg_last_death.  [I no longer understand why this is
11790		     being done.] */
11791		  if (reg_last_death[regno] != place)
11792		    reg_last_death[regno] = 0;
11793		  place = 0;
11794		}
11795	      else
11796		reg_last_death[regno] = place;
11797
11798	      /* If this is a death note for a hard reg that is occupying
11799		 multiple registers, ensure that we are still using all
11800		 parts of the object.  If we find a piece of the object
11801		 that is unused, we must add a USE for that piece before
11802		 PLACE and put the appropriate REG_DEAD note on it.
11803
11804		 An alternative would be to put a REG_UNUSED for the pieces
11805		 on the insn that set the register, but that can't be done if
11806		 it is not in the same block.  It is simpler, though less
11807		 efficient, to add the USE insns.  */
11808
11809	      if (place && regno < FIRST_PSEUDO_REGISTER
11810		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11811		{
11812		  int endregno
11813		    = regno + HARD_REGNO_NREGS (regno,
11814						GET_MODE (XEXP (note, 0)));
11815		  int all_used = 1;
11816		  int i;
11817
11818		  for (i = regno; i < endregno; i++)
11819		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11820			&& ! find_regno_fusage (place, USE, i))
11821		      {
11822			rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
11823			rtx p;
11824
11825			/* See if we already placed a USE note for this
11826			   register in front of PLACE.  */
11827			for (p = place;
11828			     GET_CODE (PREV_INSN (p)) == INSN
11829			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11830			     p = PREV_INSN (p))
11831			  if (rtx_equal_p (piece,
11832					   XEXP (PATTERN (PREV_INSN (p)), 0)))
11833			    {
11834			      p = 0;
11835			      break;
11836			    }
11837
11838			if (p)
11839			  {
11840			    rtx use_insn
11841			      = emit_insn_before (gen_rtx_USE (VOIDmode,
11842							       piece),
11843						  p);
11844			    REG_NOTES (use_insn)
11845			      = gen_rtx_EXPR_LIST (REG_DEAD, piece,
11846						   REG_NOTES (use_insn));
11847			  }
11848
11849			all_used = 0;
11850		      }
11851
11852		  /* Check for the case where the register dying partially
11853		     overlaps the register set by this insn.  */
11854		  if (all_used)
11855		    for (i = regno; i < endregno; i++)
11856		      if (dead_or_set_regno_p (place, i))
11857			  {
11858			    all_used = 0;
11859			    break;
11860			  }
11861
                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

                          if ((reg_referenced_p (piece, PATTERN (place))
                               || (GET_CODE (place) == CALL_INSN
                                   && find_reg_fusage (place, USE, piece)))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place)
                              = gen_rtx_EXPR_LIST (REG_DEAD,
                                                   piece, REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

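      /* Attach NOTE at PLACE if we found one; otherwise drop it, and for
         a REG_DEAD or REG_UNUSED note keep REG_N_DEATHS consistent.  If
         the note also belongs at PLACE2, attach a fresh copy of it
         there.  */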
      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}

/* Similarly to distribute_notes above, distribute the LOG_LINKS that
   used to be present on I3, I2, and I1 to new locations.  This is also
   called in one case to add a link pointing at I3 when I3's destination
   is changed.  */
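
/* For example (hypothetical insns): if I1 set (reg 100) and combine
   merged I2 into I3, the link that pointed from I2 back to I1 is
   redistributed here by searching forward from I1 for the next insn in
   the same basic block that still uses (reg 100), typically I3.  */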

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link
         to have been changed by combine.  The only way that could happen
         is if we replace I3, I2, and I1 by I3 and I2.  But in that case
         the destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

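      /* Find the register actually set by the link's target insn,
         stripping any SUBREG, ZERO_EXTRACT, SIGN_EXTRACT, or
         STRICT_LOW_PART wrapper, so that, e.g.,
         (strict_low_part (subreg:QI (reg:SI 100) 0)) yields
         (reg:SI 100).  */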
      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

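      /* The loop bound below stops the scan at the first insn of the
         next basic block; when this_basic_block is the last block, the
         scan simply runs to the end of the insn chain.  */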
      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || BLOCK_HEAD (this_basic_block + 1) != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}

/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
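  /* An insn made by combine has a UID above max_uid_cuid and therefore
     no cuid of its own; the USE insns emitted by distribute_notes are
     such insns.  Skip forward past them to an original insn and borrow
     its cuid.  */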
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  /* Guard against running off the end of the insn chain before
     dereferencing INSN.  */
  if (insn == 0 || INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}

/* Print combiner statistics for the current function to FILE.  */

void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

/* Print cumulative combiner statistics for the entire compilation
   to FILE.  */

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
