/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
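
   For example (a hypothetical sequence; register numbers are arbitrary),
   given

	A: (set (reg 99) (ashift (reg 98) (const_int 2)))
	B: (set (reg 100) (plus (reg 99) (reg 97)))
	C: (set (mem (reg 100)) (const_int 1))

   C has a link back to B and B has one back to A, so we will try the
   pair (B, C) and the triple (A, B, C).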

   LOG_LINKS do not contain links for uses of CC0.  None are needed,
   because the insn that sets the CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having a
   logical link to the preceding insn.  The same is true for an insn
   explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the value that
   the previous insn(s) assign to each reg into the expressions in
   the later insns that refer to those regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
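
   Continuing the hypothetical example above, substituting B's value for
   (reg 100) in C would produce the single insn

	(set (mem (plus (reg 99) (reg 97))) (const_int 1))

   which replaces C, and B is deleted, provided the machine description
   recognizes the result.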

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid_cuid		\
			 ? (abort(), 0)				\
			 : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.
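
   (As a hypothetical instance, if reg 100 is set in just one place by

	(set (reg:SI 100) (zero_extend:SI (reg:QI 99)))

   then a later (and:SI (reg:SI 100) (const_int 255)) is redundant:
   bits 8 and above of reg 100 are already known to be zero.)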

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.
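
   (An illustrative case: if reg 5 is set just once in the function, so
   that reg_n_sets[5] is 1, a recorded value such as
   (plus (reg 5) (const_int 1)) may be returned for another register even
   after a label is crossed; a register that fails these tests is instead
   replaced as described below.)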

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */
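
/* (As an illustration of how this array is read: a register that is
   always set by zero extending a byte would have an entry of 0xff,
   meaning only the low eight bits can ever be nonzero.)  */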

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);						\
      if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 0;			\
	  undobuf.undo[undobuf.num_undo].where.r = &INTO;		\
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO;	\
	  INTO = _new;							\
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)
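
/* A typical use, as in the rest of this file, replaces one operand of an
   insn pattern and records the old contents so undo_all can back the
   change out if the combination is rejected, e.g.

	SUBST (SET_SRC (set), new_src);

   where new_src is a hypothetical replacement rtx; see try_combine for
   real uses.  */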

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 1;			\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;		\
	  INTO = NEWVAL;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO(());
static void setup_incoming_promotions   PROTO(());
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PROTO((rtx));
static rtx simplify_set		PROTO((rtx));
static rtx simplify_logical	PROTO((rtx, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx extract_left_shift	PROTO((rtx, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PROTO((rtx, rtx *, rtx *));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				  ...));
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}

/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the function, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are, if nonzero,
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

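/* (For instance, try_combine below checks whether I2 may be merged into
   I3 with

	can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)

   which also extracts I2's single destination and source.)  */
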
static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
		      || ! REG_USERVAR_P (src)
#endif
		      ))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
	 	    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

  /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
     was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		 || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

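/* (combine_instructions, above, calls this as

	next = try_combine (insn, XEXP (links, 0), NULL_RTX);

   and resumes its scan at NEXT whenever the result is nonzero.)  */
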
static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  /* Number of clobbers of SCRATCH we had to add.  */
  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	  || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  subst_prev_insn = i1
	    = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
		       XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
     	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif
1446
1447  /* See if the SETs in I1 or I2 need to be kept around in the merged
1448     instruction: whenever the value set there is still needed past I3.
1449     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1450
1451     For the SET in I1, we have two cases:  If I1 and I2 independently
1452     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1453     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1454     in I1 needs to be kept around unless I1DEST dies or is set in either
1455     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1456     I1DEST.  If so, we know I1 feeds into I2.  */
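
  /* Schematically, the two cases are:

	 I1 -> I3,  I2 -> I3:  keep I1's SET unless I1DEST dies or is
			       set in I3;
	 I1 -> I2 -> I3:       keep I1's SET unless I1DEST dies or is
			       set in I2 or in I3.  */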
1457
1458  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1459
1460  added_sets_1
1461    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1462	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1463
1464  /* If the set in I2 needs to be kept around, we must make a copy of
1465     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1466     PATTERN (I2), we are only substituting for the original I1DEST, not into
1467     an already-substituted copy.  This also prevents making self-referential
1468     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1469     I2DEST.  */
1470
1471  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1472	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1473	   : PATTERN (i2));
1474
1475  if (added_sets_2)
1476    i2pat = copy_rtx (i2pat);
1477
1478  combine_merges++;
1479
1480  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1481
1482  maxreg = max_reg_num ();
1483
1484  subst_insn = i3;
1485
1486  /* It is possible that the source of I2 or I1 may be performing an
1487     unneeded operation, such as a ZERO_EXTEND of something that is known
1488     to have the high part zero.  Handle that case by letting subst look at
1489     the innermost one of them.
1490
1491     Another way to do this would be to have a function that tries to
1492     simplify a single insn instead of merging two or more insns.  We don't
1493     do this because of the potential of infinite loops and because
1494     of the potential extra memory required.  However, doing it the way
1495     we are is a bit of a kludge and doesn't catch all cases.
1496
1497     But only do this if -fexpensive-optimizations since it slows things down
1498     and doesn't usually win.  */
1499
1500  if (flag_expensive_optimizations)
1501    {
1502      /* Pass pc_rtx so no substitutions are done, just simplifications.
1503	 The cases that we are interested in here do not involve the few
1504	 cases where is_replaced is checked.  */
1505      if (i1)
1506	{
1507	  subst_low_cuid = INSN_CUID (i1);
1508	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1509	}
1510      else
1511	{
1512	  subst_low_cuid = INSN_CUID (i2);
1513	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1514	}
1515
1516      previous_num_undos = undobuf.num_undo;
1517    }
1518
1519#ifndef HAVE_cc0
1520  /* Many machines that don't use CC0 have insns that can both perform an
1521     arithmetic operation and set the condition code.  These operations will
1522     be represented as a PARALLEL with the first element of the vector
1523     being a COMPARE of an arithmetic operation with the constant zero.
1524     The second element of the vector will set some pseudo to the result
1525     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1526     match such a pattern and so will generate an extra insn.   Here we test
1527     for this case, where both the comparison and the operation result are
1528     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1529     I2SRC.  Later we will make the PARALLEL that contains I2.  */
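
  /* For example (an illustrative sketch; the modes are target-dependent):

	 I2:  (set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
	 I3:  (set (reg:CC 24) (compare (reg:SI 70) (const_int 0)))

     Here I3's COMPARE becomes a COMPARE of the PLUS against zero, and
     the PARALLEL that keeps I2's SET is made further below.  */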
1530
1531  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1532      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1533      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1534      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1535    {
1536      rtx *cc_use;
1537      enum machine_mode compare_mode;
1538
1539      newpat = PATTERN (i3);
1540      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1541
1542      i2_is_used = 1;
1543
1544#ifdef EXTRA_CC_MODES
1545      /* See if a COMPARE with the operand we substituted in should be done
1546	 with the mode that is currently being used.  If not, do the same
1547	 processing we do in `subst' for a SET; namely, if the destination
1548	 is used only once, try to replace it with a register of the proper
1549	 mode and also replace the COMPARE.  */
1550      if (undobuf.other_insn == 0
1551	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1552					&undobuf.other_insn))
1553	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1554					      i2src, const0_rtx))
1555	      != GET_MODE (SET_DEST (newpat))))
1556	{
1557	  int regno = REGNO (SET_DEST (newpat));
1558	  rtx new_dest = gen_rtx (REG, compare_mode, regno);
1559
1560	  if (regno < FIRST_PSEUDO_REGISTER
1561	      || (reg_n_sets[regno] == 1 && ! added_sets_2
1562		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1563	    {
1564	      if (regno >= FIRST_PSEUDO_REGISTER)
1565		SUBST (regno_reg_rtx[regno], new_dest);
1566
1567	      SUBST (SET_DEST (newpat), new_dest);
1568	      SUBST (XEXP (*cc_use, 0), new_dest);
1569	      SUBST (SET_SRC (newpat),
1570		     gen_rtx_combine (COMPARE, compare_mode,
1571				      i2src, const0_rtx));
1572	    }
1573	  else
1574	    undobuf.other_insn = 0;
1575	}
1576#endif
1577    }
1578  else
1579#endif
1580    {
1581      n_occurrences = 0;		/* `subst' counts here */
1582
1583      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1584	 need to make a unique copy of I2SRC each time we substitute it
1585	 to avoid self-referential rtl.  */
1586
1587      subst_low_cuid = INSN_CUID (i2);
1588      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1589		      ! i1_feeds_i3 && i1dest_in_i1src);
1590      previous_num_undos = undobuf.num_undo;
1591
1592      /* Record whether i2's body now appears within i3's body.  */
1593      i2_is_used = n_occurrences;
1594    }
1595
1596  /* If we already got a failure, don't try to do more.  Otherwise,
1597     try to substitute in I1 if we have it.  */
1598
1599  if (i1 && GET_CODE (newpat) != CLOBBER)
1600    {
1601      /* Before we can do this substitution, we must redo the test done
1602	 above (see detailed comments there) that ensures that I1DEST
1603	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1604
1605      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1606			      0, NULL_PTR))
1607	{
1608	  undo_all ();
1609	  return 0;
1610	}
1611
1612      n_occurrences = 0;
1613      subst_low_cuid = INSN_CUID (i1);
1614      newpat = subst (newpat, i1dest, i1src, 0, 0);
1615      previous_num_undos = undobuf.num_undo;
1616    }
1617
1618  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
1619     to count all the ways that I2SRC and I1SRC can be used.  */
1620  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1621       && i2_is_used + added_sets_2 > 1)
1622      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1623	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1624	      > 1))
1625      /* Fail if we tried to make a new register (we used to abort, but there's
1626	 really no reason to).  */
1627      || max_reg_num () != maxreg
1628      /* Fail if we couldn't do something and have a CLOBBER.  */
1629      || GET_CODE (newpat) == CLOBBER
1630      /* Fail if this new pattern is a MULT and we didn't have one before
1631	 at the outer level.  */
1632      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1633	  && ! have_mult))
1634    {
1635      undo_all ();
1636      return 0;
1637    }
1638
1639  /* If the actions of the earlier insns must be kept
1640     in addition to substituting them into the latest one,
1641     we must make a new PARALLEL for the latest insn
1642     to hold the additional SETs.  */
1643
1644  if (added_sets_1 || added_sets_2)
1645    {
1646      combine_extras++;
1647
1648      if (GET_CODE (newpat) == PARALLEL)
1649	{
1650	  rtvec old = XVEC (newpat, 0);
1651	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1652	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1653	  bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1654		 sizeof (old->elem[0]) * old->num_elem);
1655	}
1656      else
1657	{
1658	  rtx old = newpat;
1659	  total_sets = 1 + added_sets_1 + added_sets_2;
1660	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1661	  XVECEXP (newpat, 0, 0) = old;
1662	}
1663
1664     if (added_sets_1)
1665       XVECEXP (newpat, 0, --total_sets)
1666	 = (GET_CODE (PATTERN (i1)) == PARALLEL
1667	    ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1668
1669     if (added_sets_2)
1670	{
1671	  /* If there is no I1, use I2's body as is.  We used to also not do
1672	     the subst call below if I2 was substituted into I3,
1673	     but that could lose a simplification.  */
1674	  if (i1 == 0)
1675	    XVECEXP (newpat, 0, --total_sets) = i2pat;
1676	  else
1677	    /* See comment where i2pat is assigned.  */
1678	    XVECEXP (newpat, 0, --total_sets)
1679	      = subst (i2pat, i1dest, i1src, 0, 0);
1680	}
1681    }
1682
1683  /* We come here when we are replacing a destination in I2 with the
1684     destination of I3.  */
1685 validate_replacement:
1686
1687  /* Note which hard regs this insn has as inputs.  */
1688  mark_used_regs_combine (newpat);
1689
1690  /* Is the result of combination a valid instruction?  */
1691  insn_code_number
1692    = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1693
1694  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1695     the second SET's destination is a register that is unused.  In that case,
1696     we just need the first SET.   This can occur when simplifying a divmod
1697     insn.  We *must* test for this case here because the code below that
1698     splits two independent SETs doesn't handle this case correctly when it
1699     updates the register status.  Also check the case where the first
1700     SET's destination is unused.  That would not cause incorrect code, but
1701     does cause an unneeded insn to remain.  */
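
  /* For instance, an illustrative divmod-style result:

	 (parallel [(set (reg:SI 71) (div:SI (reg:SI 68) (reg:SI 69)))
		    (set (reg:SI 72) (mod:SI (reg:SI 68) (reg:SI 69)))])

     where I3 has a REG_UNUSED note for (reg:SI 72); only the first SET
     is needed.  */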
1702
1703  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1704      && XVECLEN (newpat, 0) == 2
1705      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1706      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1707      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1708      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1709      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1710      && asm_noperands (newpat) < 0)
1711    {
1712      newpat = XVECEXP (newpat, 0, 0);
1713      insn_code_number
1714	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1715    }
1716
1717  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1718	   && XVECLEN (newpat, 0) == 2
1719	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1720	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1721	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1722	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1723	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1724	   && asm_noperands (newpat) < 0)
1725    {
1726      newpat = XVECEXP (newpat, 0, 1);
1727      insn_code_number
1728	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1729    }
1730
1731  /* If we were combining three insns and the result is a simple SET
1732     with no ASM_OPERANDS that wasn't recognized, try to split it into two
1733     insns.  There are two ways to do this.  It can be split using a
1734     machine-specific method (like when you have an addition of a large
1735     constant) or by combine in the function find_split_point.  */
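
  /* For example (illustrative), on a machine that cannot add a large
     constant in one insn,

	 (set (reg:SI 70) (plus:SI (reg:SI 68) (const_int 123456)))

     might be split into an insn that loads the constant into a register
     followed by the addition.  */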
1736
1737  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1738      && asm_noperands (newpat) < 0)
1739    {
1740      rtx m_split, *split;
1741      rtx ni2dest = i2dest;
1742
1743      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
1744	 use I2DEST as a scratch register will help.  In the latter case,
1745	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
1746
1747      m_split = split_insns (newpat, i3);
1748
1749      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1750	 inputs of NEWPAT.  */
1751
1752      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1753	 possible to try that as a scratch reg.  This would require adding
1754	 more code to make it work though.  */
1755
1756      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1757	{
1758	  /* If I2DEST is a hard register or the only use of a pseudo,
1759	     we can change its mode.  */
1760	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1761	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
1762	      && GET_CODE (i2dest) == REG
1763	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1764		  || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1765		      && ! REG_USERVAR_P (i2dest))))
1766	    ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1767			       REGNO (i2dest));
1768
1769	  m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1770					  gen_rtvec (2, newpat,
1771						     gen_rtx (CLOBBER,
1772							      VOIDmode,
1773							      ni2dest))),
1774				 i3);
1775	}
1776
1777      if (m_split && GET_CODE (m_split) == SEQUENCE
1778	  && XVECLEN (m_split, 0) == 2
1779	  && (next_real_insn (i2) == i3
1780	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1781				      INSN_CUID (i2))))
1782	{
1783	  rtx i2set, i3set;
1784	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1785	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1786
1787	  i3set = single_set (XVECEXP (m_split, 0, 1));
1788	  i2set = single_set (XVECEXP (m_split, 0, 0));
1789
1790	  /* In case we changed the mode of I2DEST, replace it in the
1791	     pseudo-register table here.  We can't do it above in case this
1792	     code doesn't get executed and we do a split the other way.  */
1793
1794	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1795	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1796
1797	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1798					      &i2_scratches);
1799
1800	  /* If I2 or I3 has multiple SETs, we won't know how to track
1801	     register status, so don't use these insns.  */
1802
1803	  if (i2_code_number >= 0 && i2set && i3set)
1804	    insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1805						  &i3_scratches);
1806	  if (insn_code_number >= 0)
1807	    newpat = newi3pat;
1808
1809	  /* It is possible that both insns now set the destination of I3.
1810	     If so, we must show an extra use of it.  */
1811
1812	  if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1813	      && GET_CODE (SET_DEST (i2set)) == REG
1814	      && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1815	    reg_n_sets[REGNO (SET_DEST (i2set))]++;
1816	}
1817
1818      /* If we can split it and use I2DEST, go ahead and see if that
1819	 helps things be recognized.  Verify that none of the registers
1820	 are set between I2 and I3.  */
1821      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1822#ifdef HAVE_cc0
1823	  && GET_CODE (i2dest) == REG
1824#endif
1825	  /* We need I2DEST in the proper mode.  If it is a hard register
1826	     or the only use of a pseudo, we can change its mode.  */
1827	  && (GET_MODE (*split) == GET_MODE (i2dest)
1828	      || GET_MODE (*split) == VOIDmode
1829	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1830	      || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1831		  && ! REG_USERVAR_P (i2dest)))
1832	  && (next_real_insn (i2) == i3
1833	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1834	  /* We can't overwrite I2DEST if its value is still used by
1835	     NEWPAT.  */
1836	  && ! reg_referenced_p (i2dest, newpat))
1837	{
1838	  rtx newdest = i2dest;
1839	  enum rtx_code split_code = GET_CODE (*split);
1840	  enum machine_mode split_mode = GET_MODE (*split);
1841
1842	  /* Get NEWDEST as a register in the proper mode.  We have already
1843	     validated that we can do this.  */
1844	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1845	    {
1846	      newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1847
1848	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1849		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1850	    }
1851
1852	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1853	     an ASHIFT.  This can occur if it was inside a PLUS and hence
1854	     appeared to be a memory address.  This is a kludge.  */
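	  /* For example, (mult:SI (reg:SI 68) (const_int 8)) becomes
	     (ashift:SI (reg:SI 68) (const_int 3)).  */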
1855	  if (split_code == MULT
1856	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
1857	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1858	    {
1859	      SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1860					      XEXP (*split, 0), GEN_INT (i)));
1861	      /* Update split_code because we may not have a multiply
1862		 anymore.  */
1863	      split_code = GET_CODE (*split);
1864	    }
1865
1866#ifdef INSN_SCHEDULING
1867	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1868	     be written as a ZERO_EXTEND.  */
1869	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1870	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1871					    XEXP (*split, 0)));
1872#endif
1873
1874	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1875	  SUBST (*split, newdest);
1876	  i2_code_number
1877	    = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1878
1879	  /* If the split point was a MULT and we didn't have one before,
1880	     don't use one now.  */
1881	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1882	    insn_code_number
1883	      = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1884	}
1885    }
1886
1887  /* Check for a case where we loaded from memory in a narrow mode and
1888     then sign extended it, but we need both registers.  In that case,
1889     we have a PARALLEL with both loads from the same memory location.
1890     We can split this into a load from memory followed by a register-register
1891     copy.  This saves at least one insn, more if register allocation can
1892     eliminate the copy.
1893
1894     We cannot do this if the destination of the second assignment is
1895     a register that we have already assumed is zero-extended.  Similarly
1896     for a SUBREG of such a register.  */
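
  /* For example (an illustrative sketch):

	 (parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 68))))
		    (set (reg:HI 71) (mem:HI (reg:SI 68)))])

     becomes the extending load as the new I2, followed by a copy of the
     low part of (reg:SI 70) into (reg:HI 71) as the new I3.  */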
1897
1898  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1899	   && GET_CODE (newpat) == PARALLEL
1900	   && XVECLEN (newpat, 0) == 2
1901	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1902	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1903	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1904	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1905			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1906	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1907				   INSN_CUID (i2))
1908	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1909	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1910	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1911		 (GET_CODE (temp) == REG
1912		  && reg_nonzero_bits[REGNO (temp)] != 0
1913		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1914		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1915		  && (reg_nonzero_bits[REGNO (temp)]
1916		      != GET_MODE_MASK (word_mode))))
1917	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1918		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1919		     (GET_CODE (temp) == REG
1920		      && reg_nonzero_bits[REGNO (temp)] != 0
1921		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1922		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1923		      && (reg_nonzero_bits[REGNO (temp)]
1924			  != GET_MODE_MASK (word_mode)))))
1925	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1926					 SET_SRC (XVECEXP (newpat, 0, 1)))
1927	   && ! find_reg_note (i3, REG_UNUSED,
1928			       SET_DEST (XVECEXP (newpat, 0, 0))))
1929    {
1930      rtx ni2dest;
1931
1932      newi2pat = XVECEXP (newpat, 0, 0);
1933      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1934      newpat = XVECEXP (newpat, 0, 1);
1935      SUBST (SET_SRC (newpat),
1936	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1937      i2_code_number
1938	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1939
1940      if (i2_code_number >= 0)
1941	insn_code_number
1942	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1943
1944      if (insn_code_number >= 0)
1945	{
1946	  rtx insn;
1947	  rtx link;
1948
1949	  /* If we will be able to accept this, we have made a change to the
1950	     destination of I3.  This can invalidate a LOG_LINKS pointing
1951	     to I3.  No other part of combine.c makes such a transformation.
1952
1953	     The new I3 will have a destination that was previously the
1954	     destination of I1 or I2 and which was used in I2 or I3.  Call
1955	     distribute_links to make a LOG_LINK from the next use of
1956	     that destination.  */
1957
1958	  PATTERN (i3) = newpat;
1959	  distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1960
1961	  /* I3 now uses what used to be its destination and which is
1962	     now I2's destination.  That means we need a LOG_LINK from
1963	     I3 to I2.  But we used to have one, so we still will.
1964
1965	     However, some later insn might be using I2's dest and have
1966	     a LOG_LINK pointing at I3.  We must remove this link.
1967	     The simplest way to remove the link is to point it at I1,
1968	     which we know will be a NOTE.  */
1969
1970	  for (insn = NEXT_INSN (i3);
1971	       insn && (this_basic_block == n_basic_blocks - 1
1972			|| insn != basic_block_head[this_basic_block + 1]);
1973	       insn = NEXT_INSN (insn))
1974	    {
1975	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1976		  && reg_referenced_p (ni2dest, PATTERN (insn)))
1977		{
1978		  for (link = LOG_LINKS (insn); link;
1979		       link = XEXP (link, 1))
1980		    if (XEXP (link, 0) == i3)
1981		      XEXP (link, 0) = i1;
1982
1983		  break;
1984		}
1985	    }
1986	}
1987    }
1988
1989  /* Similarly, check for a case where we have a PARALLEL of two independent
1990     SETs but we started with three insns.  In this case, we can do the sets
1991     as two separate insns.  This case occurs when some SET allows two
1992     other insns to combine, but the destination of that SET is still live.  */
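
  /* For example (illustrative):

	 (parallel [(set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
		    (set (reg:SI 71) (minus:SI (reg:SI 68) (reg:SI 69)))])

     where neither SET references the other's destination; the second
     SET becomes the new I2 and the first the new I3.  */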
1993
1994  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1995	   && GET_CODE (newpat) == PARALLEL
1996	   && XVECLEN (newpat, 0) == 2
1997	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1998	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1999	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2000	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2001	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2002	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2003	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2004				   INSN_CUID (i2))
2005	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2006	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2007	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2008	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2009				  XVECEXP (newpat, 0, 0))
2010	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2011				  XVECEXP (newpat, 0, 1)))
2012    {
2013      newi2pat = XVECEXP (newpat, 0, 1);
2014      newpat = XVECEXP (newpat, 0, 0);
2015
2016      i2_code_number
2017	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2018
2019      if (i2_code_number >= 0)
2020	insn_code_number
2021	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2022    }
2023
2024  /* If it still isn't recognized, fail and change things back the way they
2025     were.  */
2026  if ((insn_code_number < 0
2027       /* Is the result a reasonable ASM_OPERANDS?  */
2028       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2029    {
2030      undo_all ();
2031      return 0;
2032    }
2033
2034  /* If we had to change another insn, make sure it is valid also.  */
2035  if (undobuf.other_insn)
2036    {
2037      rtx other_pat = PATTERN (undobuf.other_insn);
2038      rtx new_other_notes;
2039      rtx note, next;
2040
2041      CLEAR_HARD_REG_SET (newpat_used_regs);
2042
2043      other_code_number
2044	= recog_for_combine (&other_pat, undobuf.other_insn,
2045			     &new_other_notes, &other_scratches);
2046
2047      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2048	{
2049	  undo_all ();
2050	  return 0;
2051	}
2052
2053      PATTERN (undobuf.other_insn) = other_pat;
2054
2055      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2056	 are still valid.  Then add any non-duplicate notes added by
2057	 recog_for_combine.  */
2058      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2059	{
2060	  next = XEXP (note, 1);
2061
2062	  if (REG_NOTE_KIND (note) == REG_UNUSED
2063	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2064	    {
2065	      if (GET_CODE (XEXP (note, 0)) == REG)
2066		reg_n_deaths[REGNO (XEXP (note, 0))]--;
2067
2068	      remove_note (undobuf.other_insn, note);
2069	    }
2070	}
2071
2072      for (note = new_other_notes; note; note = XEXP (note, 1))
2073	if (GET_CODE (XEXP (note, 0)) == REG)
2074	  reg_n_deaths[REGNO (XEXP (note, 0))]++;
2075
2076      distribute_notes (new_other_notes, undobuf.other_insn,
2077			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2078    }
2079
2080  /* We now know that we can do this combination.  Merge the insns and
2081     update the status of registers and LOG_LINKS.  */
2082
2083  {
2084    rtx i3notes, i2notes, i1notes = 0;
2085    rtx i3links, i2links, i1links = 0;
2086    rtx midnotes = 0;
2087    register int regno;
2088    /* Compute which registers we expect to eliminate.  */
2089    rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2090		   ? 0 : i2dest);
2091    rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2092
2093    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2094       clear them.  */
2095    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2096    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2097    if (i1)
2098      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2099
2100    /* Ensure that we do not have something that should not be shared but
2101       occurs multiple times in the new insns.  Check this by first
2102       resetting all the `used' flags and then copying anything that is shared.  */
2103
2104    reset_used_flags (i3notes);
2105    reset_used_flags (i2notes);
2106    reset_used_flags (i1notes);
2107    reset_used_flags (newpat);
2108    reset_used_flags (newi2pat);
2109    if (undobuf.other_insn)
2110      reset_used_flags (PATTERN (undobuf.other_insn));
2111
2112    i3notes = copy_rtx_if_shared (i3notes);
2113    i2notes = copy_rtx_if_shared (i2notes);
2114    i1notes = copy_rtx_if_shared (i1notes);
2115    newpat = copy_rtx_if_shared (newpat);
2116    newi2pat = copy_rtx_if_shared (newi2pat);
2117    if (undobuf.other_insn)
2118      PATTERN (undobuf.other_insn)
	= copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2119
2120    INSN_CODE (i3) = insn_code_number;
2121    PATTERN (i3) = newpat;
2122    if (undobuf.other_insn)
2123      INSN_CODE (undobuf.other_insn) = other_code_number;
2124
2125    /* We had one special case above where I2 had more than one set and
2126       we replaced a destination of one of those sets with the destination
2127       of I3.  In that case, we have to update LOG_LINKS of insns later
2128       in this basic block.  Note that this (expensive) case is rare.
2129
2130       Also, in this case, we must pretend that all REG_NOTEs for I2
2131       actually came from I3, so that REG_UNUSED notes from I2 will be
2132       properly handled.  */
2133
2134    if (i3_subst_into_i2)
2135      {
2136	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2137	  if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2138	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2139	      && ! find_reg_note (i2, REG_UNUSED,
2140				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2141	    for (temp = NEXT_INSN (i2);
2142		 temp && (this_basic_block == n_basic_blocks - 1
2143			  || basic_block_head[this_basic_block + 1] != temp);
2144		 temp = NEXT_INSN (temp))
2145	      if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2146		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2147		  if (XEXP (link, 0) == i2)
2148		    XEXP (link, 0) = i3;
2149
2150	if (i3notes)
2151	  {
2152	    rtx link = i3notes;
2153	    while (XEXP (link, 1))
2154	      link = XEXP (link, 1);
2155	    XEXP (link, 1) = i2notes;
2156	  }
2157	else
2158	  i3notes = i2notes;
2159	i2notes = 0;
2160      }
2161
2162    LOG_LINKS (i3) = 0;
2163    REG_NOTES (i3) = 0;
2164    LOG_LINKS (i2) = 0;
2165    REG_NOTES (i2) = 0;
2166
2167    if (newi2pat)
2168      {
2169	INSN_CODE (i2) = i2_code_number;
2170	PATTERN (i2) = newi2pat;
2171      }
2172    else
2173      {
2174	PUT_CODE (i2, NOTE);
2175	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2176	NOTE_SOURCE_FILE (i2) = 0;
2177      }
2178
2179    if (i1)
2180      {
2181	LOG_LINKS (i1) = 0;
2182	REG_NOTES (i1) = 0;
2183	PUT_CODE (i1, NOTE);
2184	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2185	NOTE_SOURCE_FILE (i1) = 0;
2186      }
2187
2188    /* Get death notes for everything that is now used in either I3 or
2189       I2 and used to die in a previous insn.  */
2190
2191    move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2192    if (newi2pat)
2193      move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2194
2195    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2196    if (i3notes)
2197      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2198			elim_i2, elim_i1);
2199    if (i2notes)
2200      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2201			elim_i2, elim_i1);
2202    if (i1notes)
2203      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2204			elim_i2, elim_i1);
2205    if (midnotes)
2206      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2207			elim_i2, elim_i1);
2208
2209    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2210       know these are REG_UNUSED and want them to go to the desired insn,
2211       so we always pass it as i3.  We have not counted the notes in
2212       reg_n_deaths yet, so we need to do so now.  */
2213
2214    if (newi2pat && new_i2_notes)
2215      {
2216	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2217	  if (GET_CODE (XEXP (temp, 0)) == REG)
2218	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2219
2220	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2221      }
2222
2223    if (new_i3_notes)
2224      {
2225	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2226	  if (GET_CODE (XEXP (temp, 0)) == REG)
2227	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2228
2229	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2230      }
2231
2232    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2233       put a REG_DEAD note for it somewhere.  Similarly for I2 and I1.
2234       Show an additional death due to the REG_DEAD note we make here.  If
2235       we discard it in distribute_notes, we will decrement it again.  */
2236
2237    if (i3dest_killed)
2238      {
2239	if (GET_CODE (i3dest_killed) == REG)
2240	  reg_n_deaths[REGNO (i3dest_killed)]++;
2241
2242	distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2243				   NULL_RTX),
2244			  NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2245			  NULL_RTX, NULL_RTX);
2246      }
2247
2248    /* For I2 and I1, we have to be careful.  If NEWI2PAT exists and sets
2249       I2DEST or I1DEST, the death must be somewhere before I2, not I3.  If
2250       we passed I3 in that case, it might delete I2.  */
2251
2252    if (i2dest_in_i2src)
2253      {
2254	if (GET_CODE (i2dest) == REG)
2255	  reg_n_deaths[REGNO (i2dest)]++;
2256
2257	if (newi2pat && reg_set_p (i2dest, newi2pat))
2258	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2259			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2260	else
2261	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2262			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2263			    NULL_RTX, NULL_RTX);
2264      }
2265
2266    if (i1dest_in_i1src)
2267      {
2268	if (GET_CODE (i1dest) == REG)
2269	  reg_n_deaths[REGNO (i1dest)]++;
2270
2271	if (newi2pat && reg_set_p (i1dest, newi2pat))
2272	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2273			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2274	else
2275	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2276			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2277			    NULL_RTX, NULL_RTX);
2278      }
2279
2280    distribute_links (i3links);
2281    distribute_links (i2links);
2282    distribute_links (i1links);
2283
2284    if (GET_CODE (i2dest) == REG)
2285      {
2286	rtx link;
2287	rtx i2_insn = 0, i2_val = 0, set;
2288
2289	/* The insn that used to set this register doesn't exist, and
2290	   this life of the register may not exist either.  See if one of
2291	   I3's links points to an insn that sets I2DEST.  If it does,
2292	   that is now the last known value for I2DEST. If we don't update
2293	   this and I2 set the register to a value that depended on its old
2294	   contents, we will get confused.  If this insn is used, things
2295	   will be set correctly in combine_instructions.  */
2296
2297	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2298	  if ((set = single_set (XEXP (link, 0))) != 0
2299	      && rtx_equal_p (i2dest, SET_DEST (set)))
2300	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2301
2302	record_value_for_reg (i2dest, i2_insn, i2_val);
2303
2304	/* If the reg formerly set in I2 died only once and that was in I3,
2305	   zero its use count so it won't make `reload' do any work.  */
2306	if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2307	  {
2308	    regno = REGNO (i2dest);
2309	    reg_n_sets[regno]--;
2310	    if (reg_n_sets[regno] == 0
2311		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2312		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2313	      reg_n_refs[regno] = 0;
2314	  }
2315      }
2316
2317    if (i1 && GET_CODE (i1dest) == REG)
2318      {
2319	rtx link;
2320	rtx i1_insn = 0, i1_val = 0, set;
2321
2322	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2323	  if ((set = single_set (XEXP (link, 0))) != 0
2324	      && rtx_equal_p (i1dest, SET_DEST (set)))
2325	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2326
2327	record_value_for_reg (i1dest, i1_insn, i1_val);
2328
2329	regno = REGNO (i1dest);
2330	if (! added_sets_1 && ! i1dest_in_i1src)
2331	  {
2332	    reg_n_sets[regno]--;
2333	    if (reg_n_sets[regno] == 0
2334		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2335		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2336	      reg_n_refs[regno] = 0;
2337	  }
2338      }
2339
2340    /* Update reg_nonzero_bits et al for any changes that may have been made
2341       to this insn.  */
2342
2343    note_stores (newpat, set_nonzero_bits_and_sign_copies);
2344    if (newi2pat)
2345      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2346
2347    /* If we added any (clobber (scratch)), add them to the max for a
2348       block.  This is a very pessimistic calculation, since we might
2349       have had them already and this might not be the worst block, but
2350       it's not worth doing any better.  */
2351    max_scratch += i3_scratches + i2_scratches + other_scratches;
2352
2353    /* If I3 is now an unconditional jump, ensure that it has a
2354       BARRIER following it since it may have initially been a
2355       conditional jump.  It may also be the last nonnote insn.  */
2356
2357    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2358	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
2359	    || GET_CODE (temp) != BARRIER))
2360      emit_barrier_after (i3);
2361  }
2362
2363  combine_successes++;
2364
2365  /* Clear this here, so that subsequent get_last_value calls are not
2366     affected.  */
2367  subst_prev_insn = NULL_RTX;
2368
2369  if (added_links_insn
2370      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2371      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2372    return added_links_insn;
2373  else
2374    return newi2pat ? i2 : i3;
2375}
2376
2377/* Undo all the modifications recorded in undobuf.  */
2378
2379static void
2380undo_all ()
2381{
2382  register int i;
2383  if (undobuf.num_undo > MAX_UNDO)
2384    undobuf.num_undo = MAX_UNDO;
2385  for (i = undobuf.num_undo - 1; i >= 0; i--)
2386    {
2387      if (undobuf.undo[i].is_int)
2388	*undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2389      else
2390	*undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2391
2392    }
2393
2394  obfree (undobuf.storage);
2395  undobuf.num_undo = 0;
2396
2397  /* Clear this here, so that subsequent get_last_value calls are not
2398     affected.  */
2399  subst_prev_insn = NULL_RTX;
2400}
2401
2402/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2403   where we have an arithmetic expression and return that point.  LOC will
2404   be inside INSN.
2405
2406   try_combine will call this function to see if an insn can be split into
2407   two insns.  */
2408
2409static rtx *
2410find_split_point (loc, insn)
2411     rtx *loc;
2412     rtx insn;
2413{
2414  rtx x = *loc;
2415  enum rtx_code code = GET_CODE (x);
2416  rtx *split;
2417  int len = 0, pos, unsignedp;
2418  rtx inner;
2419
2420  /* First special-case some codes.  */
2421  switch (code)
2422    {
2423    case SUBREG:
2424#ifdef INSN_SCHEDULING
2425      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2426	 point.  */
2427      if (GET_CODE (SUBREG_REG (x)) == MEM)
2428	return loc;
2429#endif
2430      return find_split_point (&SUBREG_REG (x), insn);
2431
2432    case MEM:
2433#ifdef HAVE_lo_sum
2434      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2435	 using LO_SUM and HIGH.  */
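      /* For example, (mem (symbol_ref "x")) would have its address
	 rewritten as (lo_sum (high (symbol_ref "x")) (symbol_ref "x")),
	 and the HIGH part becomes the split point.  */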
2436      if (GET_CODE (XEXP (x, 0)) == CONST
2437	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2438	{
2439	  SUBST (XEXP (x, 0),
2440		 gen_rtx_combine (LO_SUM, Pmode,
2441				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2442				  XEXP (x, 0)));
2443	  return &XEXP (XEXP (x, 0), 0);
2444	}
2445#endif
2446
2447      /* If we have a PLUS whose second operand is a constant and the
2448	 address is not valid, perhaps we can split it up using
2449	 the machine-specific way to split large constants.  We use
2450	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2451	 it will not remain in the result.  */
2452      if (GET_CODE (XEXP (x, 0)) == PLUS
2453	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2454	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2455	{
2456	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2457	  rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2458				 subst_insn);
2459
2460	  /* This should have produced two insns, each of which sets our
2461	     placeholder.  If the source of the second is a valid address,
2462	     we can put both sources together and make a split point
2463	     in the middle.  */
2464
2465	  if (seq && XVECLEN (seq, 0) == 2
2466	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2467	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2468	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2469	      && ! reg_mentioned_p (reg,
2470				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2471	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2472	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2473	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2474	      && memory_address_p (GET_MODE (x),
2475				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2476	    {
2477	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2478	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2479
2480	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2481		 find where in SRC2 it was placed, that can become our
2482		 split point and we can replace this address with SRC2.
2483		 Just try two obvious places.  */
2484
2485	      src2 = replace_rtx (src2, reg, src1);
2486	      split = 0;
2487	      if (XEXP (src2, 0) == src1)
2488		split = &XEXP (src2, 0);
2489	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2490		       && XEXP (XEXP (src2, 0), 0) == src1)
2491		split = &XEXP (XEXP (src2, 0), 0);
2492
2493	      if (split)
2494		{
2495		  SUBST (XEXP (x, 0), src2);
2496		  return split;
2497		}
2498	    }
2499
2500	  /* If that didn't work, perhaps the first operand is complex and
2501	     needs to be computed separately, so make a split point there.
2502	     This will occur on machines that just support REG + CONST
2503	     and have a constant moved through some previous computation.  */
2504
2505	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2506		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2507			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2508			     == 'o')))
2509	    return &XEXP (XEXP (x, 0), 0);
2510	}
2511      break;
2512
2513    case SET:
2514#ifdef HAVE_cc0
2515      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2516	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2517	 we need to put the operand into a register.  So split at that
2518	 point.  */
2519
2520      if (SET_DEST (x) == cc0_rtx
2521	  && GET_CODE (SET_SRC (x)) != COMPARE
2522	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2523	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2524	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
2525		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2526	return &SET_SRC (x);
2527#endif
2528
2529      /* See if we can split SET_SRC as it stands.  */
2530      split = find_split_point (&SET_SRC (x), insn);
2531      if (split && split != &SET_SRC (x))
2532	return split;
2533
2534      /* See if this is a bitfield assignment with everything constant.  If
2535	 so, this is an IOR of an AND, so split it into that.  */
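      /* For instance (illustrative; bit positions assume BITS_BIG_ENDIAN
	 is zero), storing the constant 3 into a 4-bit field at bit 2 of
	 (reg:SI 70) becomes an IOR of an AND: the AND clears bits 2
	 through 5 of (reg:SI 70) and the IOR merges in (const_int 12),
	 i.e. 3 << 2.  */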
2536      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2537	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2538	      <= HOST_BITS_PER_WIDE_INT)
2539	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2540	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2541	  && GET_CODE (SET_SRC (x)) == CONST_INT
2542	  && ((INTVAL (XEXP (SET_DEST (x), 1))
2543	      + INTVAL (XEXP (SET_DEST (x), 2)))
2544	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2545	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2546	{
2547	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
2548	  int len = INTVAL (XEXP (SET_DEST (x), 1));
2549	  int src = INTVAL (SET_SRC (x));
2550	  rtx dest = XEXP (SET_DEST (x), 0);
2551	  enum machine_mode mode = GET_MODE (dest);
2552	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2553
2554	  if (BITS_BIG_ENDIAN)
2555	    pos = GET_MODE_BITSIZE (mode) - len - pos;
2556
2557	  if (src == mask)
2558	    SUBST (SET_SRC (x),
2559		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2560	  else
2561	    SUBST (SET_SRC (x),
2562		   gen_binary (IOR, mode,
2563			       gen_binary (AND, mode, dest,
2564					   GEN_INT (~ (mask << pos)
2565						    & GET_MODE_MASK (mode))),
2566			       GEN_INT (src << pos)));
2567
2568	  SUBST (SET_DEST (x), dest);
2569
2570	  split = find_split_point (&SET_SRC (x), insn);
2571	  if (split && split != &SET_SRC (x))
2572	    return split;
2573	}
2574
2575      /* Otherwise, see if this is an operation that we can split into two.
2576	 If so, try to split that.  */
2577      code = GET_CODE (SET_SRC (x));
2578
2579      switch (code)
2580	{
2581	case AND:
2582	  /* If we are AND'ing with a large constant that is only a single
2583	     bit and the result is only being used in a context where we
2584	     need to know if it is zero or non-zero, replace it with a bit
2585	     extraction.  This will avoid the large constant, which might
2586	     have taken more than one insn to make.  If the constant were
2587	     not a valid argument to the AND but took only one insn to make,
2588	     this is no worse, but if it took more than one insn, it will
2589	     be better.  */
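
	  /* For instance (illustrative),

		 (set (reg:SI 70) (and:SI (reg:SI 68) (const_int 32768)))

	     where (reg:SI 70) is used only in a test against zero would
	     typically be rewritten as a one-bit extraction of bit 15 of
	     (reg:SI 68).  */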
2590
2591	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2592	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2593	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2594	      && GET_CODE (SET_DEST (x)) == REG
2595	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2596	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2597	      && XEXP (*split, 0) == SET_DEST (x)
2598	      && XEXP (*split, 1) == const0_rtx)
2599	    {
2600	      SUBST (SET_SRC (x),
2601		     make_extraction (GET_MODE (SET_DEST (x)),
2602				      XEXP (SET_SRC (x), 0),
2603				      pos, NULL_RTX, 1, 1, 0, 0));
2604	      return find_split_point (loc, insn);
2605	    }
2606	  break;
2607
2608	case SIGN_EXTEND:
2609	  inner = XEXP (SET_SRC (x), 0);
2610	  pos = 0;
2611	  len = GET_MODE_BITSIZE (GET_MODE (inner));
2612	  unsignedp = 0;
2613	  break;
2614
2615	case SIGN_EXTRACT:
2616	case ZERO_EXTRACT:
2617	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2618	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2619	    {
2620	      inner = XEXP (SET_SRC (x), 0);
2621	      len = INTVAL (XEXP (SET_SRC (x), 1));
2622	      pos = INTVAL (XEXP (SET_SRC (x), 2));
2623
2624	      if (BITS_BIG_ENDIAN)
2625		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2626	      unsignedp = (code == ZERO_EXTRACT);
2627	    }
2628	  break;
2629	}
2630
2631      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2632	{
2633	  enum machine_mode mode = GET_MODE (SET_SRC (x));
2634
2635	  /* For unsigned, we have a choice of a shift followed by an
2636	     AND or two shifts.  Use two shifts for field sizes where the
2637	     constant might be too large.  We assume here that we can
2638	     always at least get 8-bit constants in an AND insn, which is
2639	     true for every current RISC.  */
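
	  /* For example (illustrative), an unsigned 8-bit field at bit 4
	     becomes

		 (and:SI (lshiftrt:SI (reg:SI 68) (const_int 4))
			 (const_int 255))

	     while a wider field is done with an ASHIFT followed by an
	     LSHIFTRT instead.  */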
2640
2641	  if (unsignedp && len <= 8)
2642	    {
2643	      SUBST (SET_SRC (x),
2644		     gen_rtx_combine
2645		     (AND, mode,
2646		      gen_rtx_combine (LSHIFTRT, mode,
2647				       gen_lowpart_for_combine (mode, inner),
2648				       GEN_INT (pos)),
2649		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2650
2651	      split = find_split_point (&SET_SRC (x), insn);
2652	      if (split && split != &SET_SRC (x))
2653		return split;
2654	    }
2655	  else
2656	    {
2657	      SUBST (SET_SRC (x),
2658		     gen_rtx_combine
2659		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2660		      gen_rtx_combine (ASHIFT, mode,
2661				       gen_lowpart_for_combine (mode, inner),
2662				       GEN_INT (GET_MODE_BITSIZE (mode)
2663						- len - pos)),
2664		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2665
2666	      split = find_split_point (&SET_SRC (x), insn);
2667	      if (split && split != &SET_SRC (x))
2668		return split;
2669	    }
2670	}
2671
2672      /* See if this is a simple operation with a constant as the second
2673	 operand.  It might be that this constant is out of range and hence
2674	 could be used as a split point.  */
2675      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2676	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2677	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2678	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
2679	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2680	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2681		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2682		      == 'o'))))
2683	return &XEXP (SET_SRC (x), 1);
2684
2685      /* Finally, see if this is a simple operation with its first operand
2686	 not in a register.  The operation might require this operand in a
2687	 register, so return it as a split point.  We can always do this
2688	 because if the first operand were another operation, we would have
2689	 already found it as a split point.  */
2690      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2691	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2692	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2693	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2694	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2695	return &XEXP (SET_SRC (x), 0);
2696
2697      return 0;
2698
2699    case AND:
2700    case IOR:
2701      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2702	 it is better to write this as (not (ior A B)) so we can split it.
2703	 Similarly for IOR.  */
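      /* That is, by De Morgan's laws, (and (not A) (not B)) is
	 (not (ior A B)) and (ior (not A) (not B)) is (not (and A B)).  */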
2704      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2705	{
2706	  SUBST (*loc,
2707		 gen_rtx_combine (NOT, GET_MODE (x),
2708				  gen_rtx_combine (code == IOR ? AND : IOR,
2709						   GET_MODE (x),
2710						   XEXP (XEXP (x, 0), 0),
2711						   XEXP (XEXP (x, 1), 0))));
2712	  return find_split_point (loc, insn);
2713	}
2714
2715      /* Many RISC machines have a large set of logical insns.  If the
2716	 second operand is a NOT, put it first so we will try to split the
2717	 other operand first.  */
2718      if (GET_CODE (XEXP (x, 1)) == NOT)
2719	{
2720	  rtx tem = XEXP (x, 0);
2721	  SUBST (XEXP (x, 0), XEXP (x, 1));
2722	  SUBST (XEXP (x, 1), tem);
2723	}
2724      break;
2725    }
2726
2727  /* Otherwise, select our actions depending on our rtx class.  */
2728  switch (GET_RTX_CLASS (code))
2729    {
2730    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
2731    case '3':
2732      split = find_split_point (&XEXP (x, 2), insn);
2733      if (split)
2734	return split;
2735      /* ... fall through ... */
2736    case '2':
2737    case 'c':
2738    case '<':
2739      split = find_split_point (&XEXP (x, 1), insn);
2740      if (split)
2741	return split;
2742      /* ... fall through ... */
2743    case '1':
2744      /* Some machines have (and (shift ...) ...) insns.  If X is not
2745	 an AND, but XEXP (X, 0) is, use it as our split point.  */
2746      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2747	return &XEXP (x, 0);
2748
2749      split = find_split_point (&XEXP (x, 0), insn);
2750      if (split)
2751	return split;
2752      return loc;
2753    }
2754
2755  /* Otherwise, we don't have a split point.  */
2756  return 0;
2757}
2758
2759/* Throughout X, replace FROM with TO, and return the result.
2760   The result is TO if X is FROM;
2761   otherwise the result is X, but its contents may have been modified.
2762   If they were modified, a record was made in undobuf so that
2763   undo_all will (among other things) return X to its original state.
2764
2765   If the number of changes necessary is too much to record to undo,
2766   the excess changes are not made, so the result is invalid.
2767   The changes already made can still be undone.
2768   undobuf.num_undo is incremented for such changes, so by testing that,
2769   the caller can tell whether the result is valid.
2770
2771   `n_occurrences' is incremented each time FROM is replaced.
2772
2773   IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2774
2775   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
2776   by copying if `n_occurrences' is non-zero.  */
2777
2778static rtx
2779subst (x, from, to, in_dest, unique_copy)
2780     register rtx x, from, to;
2781     int in_dest;
2782     int unique_copy;
2783{
2784  register enum rtx_code code = GET_CODE (x);
2785  enum machine_mode op0_mode = VOIDmode;
2786  register char *fmt;
2787  register int len, i;
2788  rtx new;
2789
2790/* Two expressions are equal if they are identical copies of a shared
2791   RTX or if they are both registers with the same register number
2792   and mode.  */
2793
2794#define COMBINE_RTX_EQUAL_P(X,Y)			\
2795  ((X) == (Y)						\
2796   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
2797       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2798
2799  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2800    {
2801      n_occurrences++;
2802      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2803    }
2804
2805  /* If X and FROM are the same register but different modes, they will
2806     not have been seen as equal above.  However, flow.c will make a
2807     LOG_LINKS entry for that case.  If we do nothing, we will try to
2808     rerecognize our original insn and, when it succeeds, we will
2809     delete the feeding insn, which is incorrect.
2810
2811     So force this insn not to match in this (rare) case.  */
2812  if (! in_dest && code == REG && GET_CODE (from) == REG
2813      && REGNO (x) == REGNO (from))
2814    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2815
2816  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2817     of which may contain things that can be combined.  */
2818  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2819    return x;
2820
2821  /* It is possible to have a subexpression appear twice in the insn.
2822     Suppose that FROM is a register that appears within TO.
2823     Then, after that subexpression has been scanned once by `subst',
2824     the second time it is scanned, TO may be found.  If we were
2825     to scan TO here, we would find FROM within it and create a
2826     self-referential rtl structure which is completely wrong.  */
2827  if (COMBINE_RTX_EQUAL_P (x, to))
2828    return to;
2829
2830  len = GET_RTX_LENGTH (code);
2831  fmt = GET_RTX_FORMAT (code);
2832
2833  /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2834     set up to skip this common case.  All other cases where we want to
2835     suppress replacing something inside a SET_SRC are handled via the
2836     IN_DEST operand.  */
2837  if (code == SET
2838      && (GET_CODE (SET_DEST (x)) == REG
2839        || GET_CODE (SET_DEST (x)) == CC0
2840        || GET_CODE (SET_DEST (x)) == PC))
2841    fmt = "ie";
2842
2843  /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2844  if (fmt[0] == 'e')
2845    op0_mode = GET_MODE (XEXP (x, 0));
2846
2847  for (i = 0; i < len; i++)
2848    {
2849      if (fmt[i] == 'E')
2850	{
2851	  register int j;
2852	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2853	    {
2854	      if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2855		{
2856		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2857		  n_occurrences++;
2858		}
2859	      else
2860		{
2861		  new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2862
2863		  /* If this substitution failed, this whole thing fails.  */
2864		  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2865		    return new;
2866		}
2867
2868	      SUBST (XVECEXP (x, i, j), new);
2869	    }
2870	}
2871      else if (fmt[i] == 'e')
2872	{
2873	  if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2874	    {
2875	      /* In general, don't install a subreg involving two modes not
2876		 tieable.  It can worsen register allocation, and can even
2877		 make invalid reload insns, since the reg inside may need to
2878		 be copied from in the outside mode, and that may be invalid
2879		 if it is an fp reg copied in integer mode.
2880
2881		 We allow two exceptions to this: It is valid if it is inside
2882		 another SUBREG and the mode of that SUBREG and the mode of
2883		 the inside of TO is tieable and it is valid if X is a SET
2884		 that copies FROM to CC0.  */
2885	      if (GET_CODE (to) == SUBREG
2886		  && ! MODES_TIEABLE_P (GET_MODE (to),
2887					GET_MODE (SUBREG_REG (to)))
2888		  && ! (code == SUBREG
2889			&& MODES_TIEABLE_P (GET_MODE (x),
2890					    GET_MODE (SUBREG_REG (to))))
2891#ifdef HAVE_cc0
2892		  && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2893#endif
2894		  )
2895		return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2896
2897	      new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2898	      n_occurrences++;
2899	    }
2900	  else
2901	    /* If we are in a SET_DEST, suppress most cases unless we
2902	       have gone inside a MEM, in which case we want to
2903	       simplify the address.  We assume here that things that
2904	       are actually part of the destination have their inner
2905	       parts in the first expression.  This is true for SUBREG,
2906	       STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2907	       things aside from REG and MEM that should appear in a
2908	       SET_DEST.  */
2909	    new = subst (XEXP (x, i), from, to,
2910			 (((in_dest
2911			    && (code == SUBREG || code == STRICT_LOW_PART
2912				|| code == ZERO_EXTRACT))
2913			   || code == SET)
2914			  && i == 0), unique_copy);
2915
2916	  /* If we found that we will have to reject this combination,
2917	     indicate that by returning the CLOBBER ourselves, rather than
2918	     an expression containing it.  This will speed things up as
2919	     well as prevent accidents where two CLOBBERs are considered
2920	     to be equal, thus producing an incorrect simplification.  */
2921
2922	  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2923	    return new;
2924
2925	  SUBST (XEXP (x, i), new);
2926	}
2927    }
2928
2929  /* Try to simplify X.  If the simplification changed the code, it is likely
2930     that further simplification will help, so loop, but limit the number
2931     of repetitions that will be performed.  */
2932
2933  for (i = 0; i < 4; i++)
2934    {
2935      /* If X is sufficiently simple, don't bother trying to do anything
2936	 with it.  */
2937      if (code != CONST_INT && code != REG && code != CLOBBER)
2938	x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2939
2940      if (GET_CODE (x) == code)
2941	break;
2942
2943      code = GET_CODE (x);
2944
2945      /* We no longer know the original mode of operand 0 since we
2946	 have changed the form of X.  */
2947      op0_mode = VOIDmode;
2948    }
2949
2950  return x;
2951}
2952
2953/* Simplify X, a piece of RTL.  We just operate on the expression at the
2954   outer level; call `subst' to simplify recursively.  Return the new
2955   expression.
2956
2957   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2958   will be the last iteration even if an expression with a code different from
2959   X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
2960
2961static rtx
2962simplify_rtx (x, op0_mode, last, in_dest)
2963     rtx x;
2964     enum machine_mode op0_mode;
2965     int last;
2966     int in_dest;
2967{
2968  enum rtx_code code = GET_CODE (x);
2969  enum machine_mode mode = GET_MODE (x);
2970  rtx temp;
2971  int i;
2972
2973  /* If this is a commutative operation, put a constant last and a complex
2974     expression first.  We don't need to do this for comparisons here.  */
2975  if (GET_RTX_CLASS (code) == 'c'
2976      && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2977	  || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2978	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2979	  || (GET_CODE (XEXP (x, 0)) == SUBREG
2980	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2981	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2982    {
2983      temp = XEXP (x, 0);
2984      SUBST (XEXP (x, 0), XEXP (x, 1));
2985      SUBST (XEXP (x, 1), temp);
2986    }
2987
2988  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2989     sign extension of a PLUS with a constant, reverse the order of the sign
2990     extension and the addition.  Note that this is not the same as the original
2991     code, but overflow is undefined for signed values.  Also note that the
2992     PLUS will have been partially moved "inside" the sign-extension, so that
2993     the first operand of X will really look like:
2994         (ashiftrt (plus (ashift A C4) C5) C4).
2995     We convert this to
2996         (plus (ashiftrt (ashift A C4) C4) C6), where C6 is (C5 >> C4),
2997     and replace the first operand of X with that expression.  Later parts
2998     of this function may simplify the expression further.
2999
3000     For example, if we start with (mult (sign_extend (plus A C1)) C2),
3001     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
3002     distributive law to produce (plus (mult (sign_extend A) C2) C3).
3003
3004     We do this to simplify address expressions.  */
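      /* For instance, with C4 == 24 and C5 == (const_int 0x1000000),
         (ashiftrt (plus (ashift A 24) C5) 24) becomes
         (plus (ashiftrt (ashift A 24) 24) (const_int 1)).  */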
3005
3006  if ((code == PLUS || code == MINUS || code == MULT)
3007      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3008      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3009      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3010      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3011      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3012      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3013      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3014      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3015					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3016					    XEXP (XEXP (x, 0), 1))) != 0)
3017    {
3018      rtx new
3019	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3020				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3021				INTVAL (XEXP (XEXP (x, 0), 1)));
3022
3023      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3024				  INTVAL (XEXP (XEXP (x, 0), 1)));
3025
3026      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3027    }
3028
3029  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3030     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3031     things.  Check for cases where both arms are testing the same
3032     condition.
3033
3034     Don't do anything if all operands are very simple.  */
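  /* E.g., (plus (if_then_else (eq A B) X Y) Z) can be rewritten as
     (if_then_else (eq A B) (plus X Z) (plus Y Z)), which may let each
     arm simplify further.  */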
3035
3036  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3037	|| GET_RTX_CLASS (code) == '<')
3038       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3039	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3040		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3041		      == 'o')))
3042	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3043	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3044		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3045			 == 'o')))))
3046      || (GET_RTX_CLASS (code) == '1'
3047	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3048	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3049		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3050			 == 'o'))))))
3051    {
3052      rtx cond, true, false;
3053
3054      cond = if_then_else_cond (x, &true, &false);
3055      if (cond != 0)
3056	{
3057	  rtx cop1 = const0_rtx;
3058	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3059
3060	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3061	    return x;
3062
3063	  /* Simplify the alternative arms; this may collapse the true and
3064	     false arms to store-flag values.  */
3065	  true = subst (true, pc_rtx, pc_rtx, 0, 0);
3066	  false = subst (false, pc_rtx, pc_rtx, 0, 0);
3067
3068	  /* Restarting if we generate a store-flag expression will cause
3069	     us to loop.  Just drop through in this case.  */
3070
3071	  /* If the result values are STORE_FLAG_VALUE and zero, we can
3072	     just make the comparison operation.  */
3073	  if (true == const_true_rtx && false == const0_rtx)
3074	    x = gen_binary (cond_code, mode, cond, cop1);
3075	  else if (true == const0_rtx && false == const_true_rtx)
3076	    x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3077
3078	  /* Likewise, we can make the negate of a comparison operation
3079	     if the result values are - STORE_FLAG_VALUE and zero.  */
3080	  else if (GET_CODE (true) == CONST_INT
3081		   && INTVAL (true) == - STORE_FLAG_VALUE
3082		   && false == const0_rtx)
3083	    x = gen_unary (NEG, mode, mode,
3084			   gen_binary (cond_code, mode, cond, cop1));
3085	  else if (GET_CODE (false) == CONST_INT
3086		   && INTVAL (false) == - STORE_FLAG_VALUE
3087		   && true == const0_rtx)
3088	    x = gen_unary (NEG, mode, mode,
3089			   gen_binary (reverse_condition (cond_code),
3090				       mode, cond, cop1));
3091	  else
3092	    return gen_rtx (IF_THEN_ELSE, mode,
3093			    gen_binary (cond_code, VOIDmode, cond, cop1),
3094			    true, false);
3095
3096	  code = GET_CODE (x);
3097	  op0_mode = VOIDmode;
3098	}
3099    }
3100
3101  /* Try to fold this expression in case we have constants that weren't
3102     present before.  */
3103  temp = 0;
3104  switch (GET_RTX_CLASS (code))
3105    {
3106    case '1':
3107      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3108      break;
3109    case '<':
3110      temp = simplify_relational_operation (code, op0_mode,
3111					    XEXP (x, 0), XEXP (x, 1));
3112#ifdef FLOAT_STORE_FLAG_VALUE
3113      if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3114	temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3115		: immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3116#endif
3117      break;
3118    case 'c':
3119    case '2':
3120      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3121      break;
3122    case 'b':
3123    case '3':
3124      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3125					 XEXP (x, 1), XEXP (x, 2));
3126      break;
3127    }
3128
3129  if (temp)
3130    x = temp, code = GET_CODE (temp);
3131
3132  /* First see if we can apply the inverse distributive law.  */
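      /* E.g., (ior (and A C) (and B C)) can collapse to (and (ior A B) C).  */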
3133  if (code == PLUS || code == MINUS
3134      || code == AND || code == IOR || code == XOR)
3135    {
3136      x = apply_distributive_law (x);
3137      code = GET_CODE (x);
3138    }
3139
3140  /* If CODE is an associative operation not otherwise handled, see if we
3141     can associate some operands.  This can win if they are constants or
3142     if they are logically related (e.g., (a & b) & a).  */
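      /* For instance, (plus (plus A (const_int 2)) (const_int 3)) becomes
         (plus A (const_int 5)), and (and (and A B) A) can fold its
         duplicated operand to leave (and B A).  */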
3143  if ((code == PLUS || code == MINUS
3144       || code == MULT || code == AND || code == IOR || code == XOR
3145       || code == DIV || code == UDIV
3146       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3147      && INTEGRAL_MODE_P (mode))
3148    {
3149      if (GET_CODE (XEXP (x, 0)) == code)
3150	{
3151	  rtx other = XEXP (XEXP (x, 0), 0);
3152	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3153	  rtx inner_op1 = XEXP (x, 1);
3154	  rtx inner;
3155
3156	  /* Make sure we pass the constant operand if any as the second
3157	     one if this is a commutative operation.  */
3158	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3159	    {
3160	      rtx tem = inner_op0;
3161	      inner_op0 = inner_op1;
3162	      inner_op1 = tem;
3163	    }
3164	  inner = simplify_binary_operation (code == MINUS ? PLUS
3165					     : code == DIV ? MULT
3166					     : code == UDIV ? MULT
3167					     : code,
3168					     mode, inner_op0, inner_op1);
3169
3170	  /* For commutative operations, try the other pair if that one
3171	     didn't simplify.  */
3172	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3173	    {
3174	      other = XEXP (XEXP (x, 0), 1);
3175	      inner = simplify_binary_operation (code, mode,
3176						 XEXP (XEXP (x, 0), 0),
3177						 XEXP (x, 1));
3178	    }
3179
3180	  if (inner)
3181	    return gen_binary (code, mode, other, inner);
3182	}
3183    }
3184
3185  /* A little bit of algebraic simplification here.  */
3186  switch (code)
3187    {
3188    case MEM:
3189      /* Ensure that our address has any ASHIFTs converted to MULT in case
3190	 address-recognizing predicates are called later.  */
3191      temp = make_compound_operation (XEXP (x, 0), MEM);
3192      SUBST (XEXP (x, 0), temp);
3193      break;
3194
3195    case SUBREG:
3196      /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3197	 is paradoxical.  If we can't do that safely, then it becomes
3198	 something nonsensical so that this combination won't take place.  */
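          /* E.g., on a 32-bit little-endian target, (subreg:SI (mem:DI ADDR) 1)
             becomes (mem:SI (plus ADDR (const_int 4))), provided the MEM is not
             volatile and ADDR is not mode-dependent.  */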
3199
3200      if (GET_CODE (SUBREG_REG (x)) == MEM
3201	  && (GET_MODE_SIZE (mode)
3202	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3203	{
3204	  rtx inner = SUBREG_REG (x);
3205	  int endian_offset = 0;
3206	  /* Don't change the mode of the MEM
3207	     if that would change the meaning of the address.  */
3208	  if (MEM_VOLATILE_P (SUBREG_REG (x))
3209	      || mode_dependent_address_p (XEXP (inner, 0)))
3210	    return gen_rtx (CLOBBER, mode, const0_rtx);
3211
3212	  if (BYTES_BIG_ENDIAN)
3213	    {
3214	      if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3215		endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3216	      if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3217		endian_offset -= (UNITS_PER_WORD
3218				  - GET_MODE_SIZE (GET_MODE (inner)));
3219	    }
3220	  /* Note if the plus_constant doesn't make a valid address
3221	     then this combination won't be accepted.  */
3222	  x = gen_rtx (MEM, mode,
3223		       plus_constant (XEXP (inner, 0),
3224				      (SUBREG_WORD (x) * UNITS_PER_WORD
3225				       + endian_offset)));
3226	  MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3227	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3228	  MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3229	  return x;
3230	}
3231
3232      /* If we are in a SET_DEST, these other cases can't apply.  */
3233      if (in_dest)
3234	return x;
3235
3236      /* Changing mode twice with SUBREG => just change it once,
3237	 or not at all if changing back to starting mode.  */
3238      if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3239	{
3240	  if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3241	      && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3242	    return SUBREG_REG (SUBREG_REG (x));
3243
3244	  SUBST_INT (SUBREG_WORD (x),
3245		     SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3246	  SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3247	}
3248
3249      /* SUBREG of a hard register => just change the register number
3250	 and/or mode.  If the hard register is not valid in that mode,
3251	 suppress this combination.  If the hard register is the stack,
3252	 frame, or argument pointer, leave this as a SUBREG.  */
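          /* E.g., (subreg:HI (reg:SI 3) 0) becomes (reg:HI 3), provided hard
             register 3 is valid in HImode and is none of those pointers.  */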
3253
3254      if (GET_CODE (SUBREG_REG (x)) == REG
3255	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3256	  && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3257#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3258	  && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3259#endif
3260#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3261	  && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3262#endif
3263	  && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3264	{
3265	  if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3266				  mode))
3267	    return gen_rtx (REG, mode,
3268			    REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3269	  else
3270	    return gen_rtx (CLOBBER, mode, const0_rtx);
3271	}
3272
3273      /* For a constant, try to pick up the part we want.  Handle a full
3274	 word and low-order part.  Only do this if we are narrowing
3275	 the constant; if it is being widened, we have no idea what
3276	 the extra bits will have been set to.  */
3277
3278      if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3279	  && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3280	  && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3281	  && GET_MODE_CLASS (mode) == MODE_INT)
3282	{
3283	  temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3284				  0, op0_mode);
3285	  if (temp)
3286	    return temp;
3287	}
3288
3289      /* If we want a subreg of a constant, at offset 0,
3290	 take the low bits.  On a little-endian machine, that's
3291	 always valid.  On a big-endian machine, it's valid
3292	 only if the constant's mode fits in one word.  */
3293      if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3294	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3295	  && (! WORDS_BIG_ENDIAN
3296	      || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3297	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3298
3299      /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3300	 since we are saying that the high bits don't matter.  */
3301      if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3302	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3303	return SUBREG_REG (x);
3304
3305      /* Note that we cannot do any narrowing for non-constants since
3306	 we might have been counting on using the fact that some bits were
3307	 zero.  We now do this in the SET.  */
3308
3309      break;
3310
3311    case NOT:
3312      /* (not (plus X -1)) can become (neg X).  */
3313      if (GET_CODE (XEXP (x, 0)) == PLUS
3314	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3315	return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3316
3317      /* Similarly, (not (neg X)) is (plus X -1).  */
3318      if (GET_CODE (XEXP (x, 0)) == NEG)
3319	return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3320				constm1_rtx);
3321
3322      /* (not (xor X C)) for C constant is (xor X D) with D = ~ C.  */
3323      if (GET_CODE (XEXP (x, 0)) == XOR
3324	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3325	  && (temp = simplify_unary_operation (NOT, mode,
3326					       XEXP (XEXP (x, 0), 1),
3327					       mode)) != 0)
3328	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3329
3330      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3331	 other than 1, but that is not valid.  We could do a similar
3332	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3333	 but this doesn't seem common enough to bother with.  */
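          /* E.g., (not (ashift 1 X)) becomes (rotate (not 1) X), i.e. the
             constant ~1 rotated left by X: all ones except for bit X.  */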
3334      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3335	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3336	return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3337			XEXP (XEXP (x, 0), 1));
3338
3339      if (GET_CODE (XEXP (x, 0)) == SUBREG
3340	  && subreg_lowpart_p (XEXP (x, 0))
3341	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3342	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3343	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3344	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3345	{
3346	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3347
3348	  x = gen_rtx (ROTATE, inner_mode,
3349		       gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3350		       XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3351	  return gen_lowpart_for_combine (mode, x);
3352	}
3353
3354#if STORE_FLAG_VALUE == -1
3355      /* (not (comparison foo bar)) can be done by reversing the comparison
3356	 code if valid.  */
3357      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3358	  && reversible_comparison_p (XEXP (x, 0)))
3359	return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3360				mode, XEXP (XEXP (x, 0), 0),
3361				XEXP (XEXP (x, 0), 1));
3362
3363      /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
3364	 is (not (lt foo (const_int 0))), so we can perform the above
3365	 simplification, yielding (ge foo (const_int 0)).  */
3366
3367      if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3369	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3370	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3371	return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3372#endif
3373
3374      /* Apply De Morgan's laws to reduce number of patterns for machines
3375 	 with negating logical insns (and-not, nand, etc.).  If result has
3376 	 only one NOT, put it first, since that is how the patterns are
3377 	 coded.  */
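          /* For example, (not (ior A B)) becomes (and (not A) (not B)), and
             (not (and A (not B))) becomes (ior (not A) B).  */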
3378
3379      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3380 	{
3381 	 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3382
3383	 if (GET_CODE (in1) == NOT)
3384	   in1 = XEXP (in1, 0);
3385 	 else
3386	   in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3387
3388	 if (GET_CODE (in2) == NOT)
3389	   in2 = XEXP (in2, 0);
3390 	 else if (GET_CODE (in2) == CONST_INT
3391		  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3392	   in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3393	 else
3394	   in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3395
3396	 if (GET_CODE (in2) == NOT)
3397	   {
3398	     rtx tem = in2;
3399	     in2 = in1; in1 = tem;
3400	   }
3401
3402	 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3403				 mode, in1, in2);
3404       }
3405      break;
3406
3407    case NEG:
3408      /* (neg (plus X 1)) can become (not X).  */
3409      if (GET_CODE (XEXP (x, 0)) == PLUS
3410	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
3411	return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3412
3413      /* Similarly, (neg (not X)) is (plus X 1).  */
3414      if (GET_CODE (XEXP (x, 0)) == NOT)
3415	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3416
3417      /* (neg (minus X Y)) can become (minus Y X).  */
3418      if (GET_CODE (XEXP (x, 0)) == MINUS
3419	  && (! FLOAT_MODE_P (mode)
3420	      /* x-y != -(y-x) with IEEE floating point. */
3421	      || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3422	      || flag_fast_math))
3423	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3424			   XEXP (XEXP (x, 0), 0));
3425
3426      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3427      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3428	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3429	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3430
3431      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
3432	 if we can then eliminate the NEG (e.g.,
3433	 if the operand is a constant).  */
3434
3435      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3436	{
3437	  temp = simplify_unary_operation (NEG, mode,
3438					   XEXP (XEXP (x, 0), 0), mode);
3439	  if (temp)
3440	    {
3441	      SUBST (XEXP (XEXP (x, 0), 0), temp);
3442	      return XEXP (x, 0);
3443	    }
3444	}
3445
3446      temp = expand_compound_operation (XEXP (x, 0));
3447
3448      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3449 	 replaced by (lshiftrt X C).  This will convert
3450	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
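      /* E.g., in SImode, (neg (ashiftrt X 31)) becomes (lshiftrt X 31),
         mapping the values -1 and 0 to 1 and 0 respectively.  */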
3451
3452      if (GET_CODE (temp) == ASHIFTRT
3453	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
3454	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3455	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3456				     INTVAL (XEXP (temp, 1)));
3457
3458      /* If X has only a single bit that might be nonzero, say, bit I, convert
3459	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3460	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
3461	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
3462	 or a SUBREG of one since we'd be making the expression more
3463	 complex if it was just a register.  */
3464
3465      if (GET_CODE (temp) != REG
3466	  && ! (GET_CODE (temp) == SUBREG
3467		&& GET_CODE (SUBREG_REG (temp)) == REG)
3468	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3469	{
3470	  rtx temp1 = simplify_shift_const
3471	    (NULL_RTX, ASHIFTRT, mode,
3472	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3473				   GET_MODE_BITSIZE (mode) - 1 - i),
3474	     GET_MODE_BITSIZE (mode) - 1 - i);
3475
3476	  /* If all we did was surround TEMP with the two shifts, we
3477	     haven't improved anything, so don't use it.  Otherwise,
3478	     we are better off with TEMP1.  */
3479	  if (GET_CODE (temp1) != ASHIFTRT
3480	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3481	      || XEXP (XEXP (temp1, 0), 0) != temp)
3482	    return temp1;
3483	}
3484      break;
3485
3486    case TRUNCATE:
3487      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3488	SUBST (XEXP (x, 0),
3489	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3490			      GET_MODE_MASK (mode), NULL_RTX, 0));
3491      break;
3492
3493    case FLOAT_TRUNCATE:
3494      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
3495      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3496	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3497 	return XEXP (XEXP (x, 0), 0);
3498
3499      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
3500	 (OP:SF foo:SF) if OP is NEG or ABS.  */
3501      if ((GET_CODE (XEXP (x, 0)) == ABS
3502	   || GET_CODE (XEXP (x, 0)) == NEG)
3503	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3504	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3505	return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3506			  XEXP (XEXP (XEXP (x, 0), 0), 0));
3507
3508      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3509	 is (float_truncate:SF x).  */
3510      if (GET_CODE (XEXP (x, 0)) == SUBREG
3511	  && subreg_lowpart_p (XEXP (x, 0))
3512	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3513	return SUBREG_REG (XEXP (x, 0));
3514      break;
3515
3516#ifdef HAVE_cc0
3517    case COMPARE:
3518      /* Convert (compare FOO (const_int 0)) to FOO.  Without cc0 we would
3519	 want to keep the COMPARE so as to distinguish it from a plain
3520	 register-register copy; that is why this is inside HAVE_cc0.  */
3521      if (XEXP (x, 1) == const0_rtx)
3522	return XEXP (x, 0);
3523
3524      /* In IEEE floating point, x-0 is not the same as x.  */
3525      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3526	   || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3527	   || flag_fast_math)
3528	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3529	return XEXP (x, 0);
3530      break;
3531#endif
3532
3533    case CONST:
3534      /* (const (const X)) can become (const X).  Do it this way rather than
3535	 returning the inner CONST since CONST can be shared with a
3536	 REG_EQUAL note.  */
3537      if (GET_CODE (XEXP (x, 0)) == CONST)
3538	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3539      break;
3540
3541#ifdef HAVE_lo_sum
3542    case LO_SUM:
3543      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
3544	 can add in an offset.  find_split_point will split this address up
3545	 again if it doesn't match.  */
3546      if (GET_CODE (XEXP (x, 0)) == HIGH
3547	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3548	return XEXP (x, 1);
3549      break;
3550#endif
3551
3552    case PLUS:
3553      /* If we have (plus (plus (A const) B)), associate it so that CONST is
3554	 outermost.  That's because that's the way indexed addresses are
3555	 supposed to appear.  This code used to check many more cases, but
3556	 they are now checked elsewhere.  */
3557      if (GET_CODE (XEXP (x, 0)) == PLUS
3558	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3559	return gen_binary (PLUS, mode,
3560			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3561				       XEXP (x, 1)),
3562			   XEXP (XEXP (x, 0), 1));
3563
3564      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3565	 when c is (const_int (pow2 / 2)) is a sign extension of a
3566	 bit-field and can be replaced by either a sign_extend or a
3567	 sign_extract.  The `and' may be a zero_extend.  */
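          /* E.g., in SImode with an eight-bit field, (plus (xor (and X 255)
             128) -128) becomes (ashiftrt (ashift X 24) 24), the sign
             extension of the low byte of X.  */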
3568      if (GET_CODE (XEXP (x, 0)) == XOR
3569	  && GET_CODE (XEXP (x, 1)) == CONST_INT
3570	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3571	  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3572	  && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3573	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3574	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3575	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3576	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3577		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3578	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3579		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3580		      == i + 1))))
3581	return simplify_shift_const
3582	  (NULL_RTX, ASHIFTRT, mode,
3583	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
3584				 XEXP (XEXP (XEXP (x, 0), 0), 0),
3585				 GET_MODE_BITSIZE (mode) - (i + 1)),
3586	   GET_MODE_BITSIZE (mode) - (i + 1));
3587
3588      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3589	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3590	 is 1.  This produces better code than the alternative immediately
3591	 below.  */
3592      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3593	  && reversible_comparison_p (XEXP (x, 0))
3594	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3595	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3596	return
3597	  gen_unary (NEG, mode, mode,
3598		     gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3599				 mode, XEXP (XEXP (x, 0), 0),
3600				 XEXP (XEXP (x, 0), 1)));
3601
3602      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3603	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3604	 the bitsize of the mode - 1.  This allows simplification of
3605	 "a = (b & 8) == 0;"  */
3606      if (XEXP (x, 1) == constm1_rtx
3607	  && GET_CODE (XEXP (x, 0)) != REG
3608	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3609		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3610	  && nonzero_bits (XEXP (x, 0), mode) == 1)
3611	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3612	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
3613				 gen_rtx_combine (XOR, mode,
3614						  XEXP (x, 0), const1_rtx),
3615				 GET_MODE_BITSIZE (mode) - 1),
3616	   GET_MODE_BITSIZE (mode) - 1);
3617
3618      /* If we are adding two things that have no bits in common, convert
3619	 the addition into an IOR.  This will often be further simplified,
3620	 for example in cases like ((a & 1) + (a & 2)), which can
3621	 become a & 3.  */
3622
3623      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3624	  && (nonzero_bits (XEXP (x, 0), mode)
3625	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
3626	return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3627      break;
3628
3629    case MINUS:
3630#if STORE_FLAG_VALUE == 1
3631      /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3632	 code if valid.  */
3633      if (XEXP (x, 0) == const1_rtx
3634	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3635	  && reversible_comparison_p (XEXP (x, 1)))
3636	return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3637			   mode, XEXP (XEXP (x, 1), 0),
3638				XEXP (XEXP (x, 1), 1));
3639#endif
3640
3641      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3642	 (and <foo> (const_int pow2-1))  */
3643      if (GET_CODE (XEXP (x, 1)) == AND
3644	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3645	  && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3646	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3647	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3648				       - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3649
3650      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3651	 integers.  */
3652      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3653	return gen_binary (MINUS, mode,
3654			   gen_binary (MINUS, mode, XEXP (x, 0),
3655				       XEXP (XEXP (x, 1), 0)),
3656			   XEXP (XEXP (x, 1), 1));
3657      break;
3658
3659    case MULT:
3660      /* If we have (mult (plus A B) C), apply the distributive law and then
3661	 the inverse distributive law to see if things simplify.  This
3662	 occurs mostly in addresses, often when unrolling loops.  */
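          /* E.g., (mult (plus A (const_int 4)) (const_int 8)) becomes
             (plus (mult A (const_int 8)) (const_int 32)).  */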
3663
3664      if (GET_CODE (XEXP (x, 0)) == PLUS)
3665	{
3666	  x = apply_distributive_law
3667	    (gen_binary (PLUS, mode,
3668			 gen_binary (MULT, mode,
3669				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3670			 gen_binary (MULT, mode,
3671				     XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3672
3673	  if (GET_CODE (x) != MULT)
3674	    return x;
3675	}
3676      break;
3677
3678    case UDIV:
3679      /* If this is a divide by a power of two, treat it as a shift if
3680	 its first operand is a shift.  */
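          /* E.g., (udiv (lshiftrt A 2) (const_int 4)) can become
             (lshiftrt A 4) once simplify_shift_const merges the two
             logical shifts.  */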
3681      if (GET_CODE (XEXP (x, 1)) == CONST_INT
3682	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3683	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
3684	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3685	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3686	      || GET_CODE (XEXP (x, 0)) == ROTATE
3687	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
3688	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3689      break;
3690
3691    case EQ:  case NE:
3692    case GT:  case GTU:  case GE:  case GEU:
3693    case LT:  case LTU:  case LE:  case LEU:
3694      /* If the first operand is a condition code, we can't do anything
3695	 with it.  */
3696      if (GET_CODE (XEXP (x, 0)) == COMPARE
3697	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3698#ifdef HAVE_cc0
3699	      && XEXP (x, 0) != cc0_rtx
3700#endif
3701	       ))
3702	{
3703	  rtx op0 = XEXP (x, 0);
3704	  rtx op1 = XEXP (x, 1);
3705	  enum rtx_code new_code;
3706
3707	  if (GET_CODE (op0) == COMPARE)
3708	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3709
3710	  /* Simplify our comparison, if possible.  */
3711	  new_code = simplify_comparison (code, &op0, &op1);
3712
3713#if STORE_FLAG_VALUE == 1
3714	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3715	     if only the low-order bit is possibly nonzero in X (such as when
3716	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
3717	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
3718	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
3719	     (plus X 1).
3720
3721	     Remove any ZERO_EXTRACT we made when thinking this was a
3722	     comparison.  It may now be simpler to use, e.g., an AND.  If a
3723	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
3724	     the call to make_compound_operation in the SET case.  */
3725
3726	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3727	      && op1 == const0_rtx
3728	      && nonzero_bits (op0, mode) == 1)
3729	    return gen_lowpart_for_combine (mode,
3730					    expand_compound_operation (op0));
3731
3732	  else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3733		   && op1 == const0_rtx
3734		   && (num_sign_bit_copies (op0, mode)
3735		       == GET_MODE_BITSIZE (mode)))
3736	    {
3737	      op0 = expand_compound_operation (op0);
3738	      return gen_unary (NEG, mode, mode,
3739				gen_lowpart_for_combine (mode, op0));
3740	    }
3741
3742	  else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3743		   && op1 == const0_rtx
3744		   && nonzero_bits (op0, mode) == 1)
3745	    {
3746	      op0 = expand_compound_operation (op0);
3747	      return gen_binary (XOR, mode,
3748				 gen_lowpart_for_combine (mode, op0),
3749				 const1_rtx);
3750	    }
3751
3752	  else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3753		   && op1 == const0_rtx
3754		   && (num_sign_bit_copies (op0, mode)
3755		       == GET_MODE_BITSIZE (mode)))
3756	    {
3757	      op0 = expand_compound_operation (op0);
3758	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3759	    }
3760#endif
3761
3762#if STORE_FLAG_VALUE == -1
3763	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
3764	     those above.  */
3765	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3766	      && op1 == const0_rtx
3767	      && (num_sign_bit_copies (op0, mode)
3768		  == GET_MODE_BITSIZE (mode)))
3769	    return gen_lowpart_for_combine (mode,
3770					    expand_compound_operation (op0));
3771
3772	  else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3773		   && op1 == const0_rtx
3774		   && nonzero_bits (op0, mode) == 1)
3775	    {
3776	      op0 = expand_compound_operation (op0);
3777	      return gen_unary (NEG, mode, mode,
3778				gen_lowpart_for_combine (mode, op0));
3779	    }
3780
3781	  else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3782		   && op1 == const0_rtx
3783		   && (num_sign_bit_copies (op0, mode)
3784		       == GET_MODE_BITSIZE (mode)))
3785	    {
3786	      op0 = expand_compound_operation (op0);
3787	      return gen_unary (NOT, mode, mode,
3788				gen_lowpart_for_combine (mode, op0));
3789	    }
3790
3791	  /* If X is 0/1, (eq X 0) is X-1.  */
3792	  else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3793		   && op1 == const0_rtx
3794		   && nonzero_bits (op0, mode) == 1)
3795	    {
3796	      op0 = expand_compound_operation (op0);
3797	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3798	    }
3799#endif
3800
3801	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3802	     one bit that might be nonzero, we can convert (ne x 0) to
3803	     (ashift x c) where C puts the bit in the sign bit.  Remove any
3804	     AND with STORE_FLAG_VALUE when we are done, since we are only
3805	     going to test the sign bit.  */
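	  /* E.g., in SImode with STORE_FLAG_VALUE == 0x80000000, if only
	     bit 3 of X can be nonzero, (ne X 0) becomes (ashift X 28).  */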
3806	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3807	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3808	      && (STORE_FLAG_VALUE
3809		  == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3810	      && op1 == const0_rtx
3811	      && mode == GET_MODE (op0)
3812	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3813	    {
3814	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3815					expand_compound_operation (op0),
3816					GET_MODE_BITSIZE (mode) - 1 - i);
3817	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3818		return XEXP (x, 0);
3819	      else
3820		return x;
3821	    }
3822
3823	  /* If the code changed, return a whole new comparison.  */
3824	  if (new_code != code)
3825	    return gen_rtx_combine (new_code, mode, op0, op1);
3826
3827	  /* Otherwise, keep this operation, but maybe change its operands.
3828	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
3829	  SUBST (XEXP (x, 0), op0);
3830	  SUBST (XEXP (x, 1), op1);
3831	}
3832      break;
3833
3834    case IF_THEN_ELSE:
3835      return simplify_if_then_else (x);
3836
3837    case ZERO_EXTRACT:
3838    case SIGN_EXTRACT:
3839    case ZERO_EXTEND:
3840    case SIGN_EXTEND:
3841      /* If we are processing SET_DEST, we are done. */
3842      if (in_dest)
3843	return x;
3844
3845      return expand_compound_operation (x);
3846
3847    case SET:
3848      return simplify_set (x);
3849
3850    case AND:
3851    case IOR:
3852    case XOR:
3853      return simplify_logical (x, last);
3854
3855    case ABS:
3856      /* (abs (neg <foo>)) -> (abs <foo>) */
3857      if (GET_CODE (XEXP (x, 0)) == NEG)
3858	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3859
3860      /* If operand is something known to be positive, ignore the ABS.  */
3861      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3862	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3863	       <= HOST_BITS_PER_WIDE_INT)
3864	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3865		   & ((HOST_WIDE_INT) 1
3866		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3867		  == 0)))
3868	return XEXP (x, 0);
3869
3870
3871      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
3872      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3873	return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3874
3875      break;
3876
3877    case FFS:
3878      /* (ffs (*_extend <X>)) = (ffs <X>) */
3879      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3880	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3881	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3882      break;
3883
3884    case FLOAT:
3885      /* (float (sign_extend <X>)) = (float <X>).  */
3886      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3887	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3888      break;
3889
3890    case ASHIFT:
3891    case LSHIFTRT:
3892    case ASHIFTRT:
3893    case ROTATE:
3894    case ROTATERT:
3895      /* If this is a shift by a constant amount, simplify it.  */
3896      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3897	return simplify_shift_const (x, code, mode, XEXP (x, 0),
3898				     INTVAL (XEXP (x, 1)));
3899
3900#ifdef SHIFT_COUNT_TRUNCATED
3901      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3902	SUBST (XEXP (x, 1),
3903	       force_to_mode (XEXP (x, 1), GET_MODE (x),
3904			      ((HOST_WIDE_INT) 1
3905			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3906			      - 1,
3907			      NULL_RTX, 0));
3908#endif
3909
3910      break;
3911    }
3912
3913  return x;
3914}
3915
3916/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
3917
3918static rtx
3919simplify_if_then_else (x)
3920     rtx x;
3921{
3922  enum machine_mode mode = GET_MODE (x);
3923  rtx cond = XEXP (x, 0);
3924  rtx true = XEXP (x, 1);
3925  rtx false = XEXP (x, 2);
3926  enum rtx_code true_code = GET_CODE (cond);
3927  int comparison_p = GET_RTX_CLASS (true_code) == '<';
3928  rtx temp;
3929  int i;
3930
3931  /* Simplify storing of the truth value. */
3932  if (comparison_p && true == const_true_rtx && false == const0_rtx)
3933    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3934
3935  /* Also when the truth value has to be reversed. */
3936  if (comparison_p && reversible_comparison_p (cond)
3937      && true == const0_rtx && false == const_true_rtx)
3938    return gen_binary (reverse_condition (true_code),
3939		       mode, XEXP (cond, 0), XEXP (cond, 1));
3940
3941  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3942     in it is being compared against certain values.  Get the true and false
3943     comparisons and see if that says anything about the value of each arm.  */
3944
3945  if (comparison_p && reversible_comparison_p (cond)
3946      && GET_CODE (XEXP (cond, 0)) == REG)
3947    {
3948      HOST_WIDE_INT nzb;
3949      rtx from = XEXP (cond, 0);
3950      enum rtx_code false_code = reverse_condition (true_code);
3951      rtx true_val = XEXP (cond, 1);
3952      rtx false_val = true_val;
3953      int swapped = 0;
3954
3955      /* If FALSE_CODE is EQ, swap the codes and arms.  */
3956
3957      if (false_code == EQ)
3958	{
3959	  swapped = 1, true_code = EQ, false_code = NE;
3960	  temp = true, true = false, false = temp;
3961	}
3962
3963      /* If we are comparing against zero and the expression being tested has
3964	 only a single bit that might be nonzero, that is its value when it is
3965	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
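          /* For example, if only bit 2 of FROM can be nonzero, then in the
             arm where (eq FROM 0) fails FROM must have the value 4.  */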
3966
3967      if (true_code == EQ && true_val == const0_rtx
3968	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3969	false_code = EQ, false_val = GEN_INT (nzb);
3970      else if (true_code == EQ && true_val == const0_rtx
3971	       && (num_sign_bit_copies (from, GET_MODE (from))
3972		   == GET_MODE_BITSIZE (GET_MODE (from))))
3973	false_code = EQ, false_val = constm1_rtx;
3974
3975      /* Now simplify an arm if we know the value of the register in the
3976	 branch and it is used in the arm.  Be careful due to the potential
3977	 of locally-shared RTL.  */
3978
3979      if (reg_mentioned_p (from, true))
3980	true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3981		      pc_rtx, pc_rtx, 0, 0);
3982      if (reg_mentioned_p (from, false))
3983	false = subst (known_cond (copy_rtx (false), false_code,
3984				   from, false_val),
3985		       pc_rtx, pc_rtx, 0, 0);
3986
3987      SUBST (XEXP (x, 1), swapped ? false : true);
3988      SUBST (XEXP (x, 2), swapped ? true : false);
3989
3990      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3991    }
3992
3993  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3994     reversed, do so to avoid needing two sets of patterns for
3995     subtract-and-branch insns.  Similarly if we have a constant in the true
3996     arm, the false arm is the same as the first operand of the comparison, or
3997     the false arm is more complicated than the true arm.  */
3998
3999  if (comparison_p && reversible_comparison_p (cond)
4000      && (true == pc_rtx
4001	  || (CONSTANT_P (true)
4002	      && GET_CODE (false) != CONST_INT && false != pc_rtx)
4003	  || true == const0_rtx
4004	  || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4005	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4006	  || (GET_CODE (true) == SUBREG
4007	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4008	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4009	  || reg_mentioned_p (true, false)
4010	  || rtx_equal_p (false, XEXP (cond, 0))))
4011    {
4012      true_code = reverse_condition (true_code);
4013      SUBST (XEXP (x, 0),
4014	     gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4015			 XEXP (cond, 1)));
4016
4017      SUBST (XEXP (x, 1), false);
4018      SUBST (XEXP (x, 2), true);
4019
4020      temp = true, true = false, false = temp, cond = XEXP (x, 0);
4021    }
4022
4023  /* If the two arms are identical, we don't need the comparison.  */
4024
4025  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4026    return true;
4027
4028  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4029
4030  if (GET_MODE_CLASS (mode) == MODE_INT
4031      && GET_CODE (false) == NEG
4032      && rtx_equal_p (true, XEXP (false, 0))
4033      && comparison_p
4034      && rtx_equal_p (true, XEXP (cond, 0))
4035      && ! side_effects_p (true))
4036    switch (true_code)
4037      {
4038      case GT:
4039      case GE:
4040	return gen_unary (ABS, mode, mode, true);
4041      case LT:
4042      case LE:
4043	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4044      }
4045
4046  /* Look for MIN or MAX.  */
4047
4048  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4049      && comparison_p
4050      && rtx_equal_p (XEXP (cond, 0), true)
4051      && rtx_equal_p (XEXP (cond, 1), false)
4052      && ! side_effects_p (cond))
4053    switch (true_code)
4054      {
4055      case GE:
4056      case GT:
4057	return gen_binary (SMAX, mode, true, false);
4058      case LE:
4059      case LT:
4060	return gen_binary (SMIN, mode, true, false);
4061      case GEU:
4062      case GTU:
4063	return gen_binary (UMAX, mode, true, false);
4064      case LEU:
4065      case LTU:
4066	return gen_binary (UMIN, mode, true, false);
4067      }
4068
4069#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4070
4071  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4072     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4073     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4074     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4075     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4076     neither of the above, but it isn't worth checking for.  */
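  /* For instance, with STORE_FLAG_VALUE == 1,
     (if_then_else (eq A B) (plus Z (const_int 3)) Z) can become
     (plus Z (mult (eq A B) (const_int 3))).  */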
4077
4078  if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4079    {
4080      rtx t = make_compound_operation (true, SET);
4081      rtx f = make_compound_operation (false, SET);
4082      rtx cond_op0 = XEXP (cond, 0);
4083      rtx cond_op1 = XEXP (cond, 1);
4084      enum rtx_code op, extend_op = NIL;
4085      enum machine_mode m = mode;
4086      rtx z = 0, c1;
4087
4088      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4089	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4090	   || GET_CODE (t) == ASHIFT
4091	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4092	  && rtx_equal_p (XEXP (t, 0), f))
4093	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4094
4095      /* If an identity-zero op is commutative, check whether there
4096	 would be a match if we swapped the operands. */
4097      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4098		|| GET_CODE (t) == XOR)
4099	       && rtx_equal_p (XEXP (t, 1), f))
4100	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4101      else if (GET_CODE (t) == SIGN_EXTEND
4102	       && (GET_CODE (XEXP (t, 0)) == PLUS
4103		   || GET_CODE (XEXP (t, 0)) == MINUS
4104		   || GET_CODE (XEXP (t, 0)) == IOR
4105		   || GET_CODE (XEXP (t, 0)) == XOR
4106		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4107		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4108		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4109	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4110	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4111	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4112	       && (num_sign_bit_copies (f, GET_MODE (f))
4113		   > (GET_MODE_BITSIZE (mode)
4114		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4115	{
4116	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4117	  extend_op = SIGN_EXTEND;
4118	  m = GET_MODE (XEXP (t, 0));
4119	}
4120      else if (GET_CODE (t) == SIGN_EXTEND
4121	       && (GET_CODE (XEXP (t, 0)) == PLUS
4122		   || GET_CODE (XEXP (t, 0)) == IOR
4123		   || GET_CODE (XEXP (t, 0)) == XOR)
4124	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4125	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4126	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4127	       && (num_sign_bit_copies (f, GET_MODE (f))
4128		   > (GET_MODE_BITSIZE (mode)
4129		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4130	{
4131	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4132	  extend_op = SIGN_EXTEND;
4133	  m = GET_MODE (XEXP (t, 0));
4134	}
4135      else if (GET_CODE (t) == ZERO_EXTEND
4136	       && (GET_CODE (XEXP (t, 0)) == PLUS
4137		   || GET_CODE (XEXP (t, 0)) == MINUS
4138		   || GET_CODE (XEXP (t, 0)) == IOR
4139		   || GET_CODE (XEXP (t, 0)) == XOR
4140		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4141		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4142		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4143	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4144	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4145	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4146	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4147	       && ((nonzero_bits (f, GET_MODE (f))
4148		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4149		   == 0))
4150	{
4151	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4152	  extend_op = ZERO_EXTEND;
4153	  m = GET_MODE (XEXP (t, 0));
4154	}
4155      else if (GET_CODE (t) == ZERO_EXTEND
4156	       && (GET_CODE (XEXP (t, 0)) == PLUS
4157		   || GET_CODE (XEXP (t, 0)) == IOR
4158		   || GET_CODE (XEXP (t, 0)) == XOR)
4159	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4160	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4161	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4162	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4163	       && ((nonzero_bits (f, GET_MODE (f))
4164		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4165		   == 0))
4166	{
4167	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4168	  extend_op = ZERO_EXTEND;
4169	  m = GET_MODE (XEXP (t, 0));
4170	}
4171
4172      if (z)
4173	{
4174	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4175			pc_rtx, pc_rtx, 0, 0);
4176	  temp = gen_binary (MULT, m, temp,
4177			     gen_binary (MULT, m, c1, const_true_rtx));
4178	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4179	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4180
4181	  if (extend_op != NIL)
4182	    temp = gen_unary (extend_op, mode, m, temp);
4183
4184	  return temp;
4185	}
4186    }
4187#endif
4188
4189  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4190     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4191     negation of a single bit, we can convert this operation to a shift.  We
4192     can actually do this more generally, but it doesn't seem worth it.  */
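  /* E.g., (if_then_else (ne A (const_int 0)) (const_int 8) (const_int 0))
     where A is known to be 0 or 1 becomes (ashift A 3).  */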
4193
4194  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4195      && false == const0_rtx && GET_CODE (true) == CONST_INT
4196      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4197	   && (i = exact_log2 (INTVAL (true))) >= 0)
4198	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4199	       == GET_MODE_BITSIZE (mode))
4200	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
4201    return
4202      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4203			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4204
4205  return x;
4206}
4207
4208/* Simplify X, a SET expression.  Return the new expression.  */
4209
4210static rtx
4211simplify_set (x)
4212     rtx x;
4213{
4214  rtx src = SET_SRC (x);
4215  rtx dest = SET_DEST (x);
4216  enum machine_mode mode
4217    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4218  rtx other_insn;
4219  rtx *cc_use;
4220
4221  /* (set (pc) (return)) gets written as (return).  */
4222  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4223    return src;
4224
4225  /* Now that we know for sure which bits of SRC we are using, see if we can
4226     simplify the expression for the object knowing that we only need the
4227     low-order bits.  */
4228
4229  if (GET_MODE_CLASS (mode) == MODE_INT)
4230    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4231
4232  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4233     the comparison result and try to simplify it unless we already have used
4234     undobuf.other_insn.  */
4235  if ((GET_CODE (src) == COMPARE
4236#ifdef HAVE_cc0
4237       || dest == cc0_rtx
4238#endif
4239       )
4240      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4241      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4242      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4243      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4244    {
4245      enum rtx_code old_code = GET_CODE (*cc_use);
4246      enum rtx_code new_code;
4247      rtx op0, op1;
4248      int other_changed = 0;
4249      enum machine_mode compare_mode = GET_MODE (dest);
4250
4251      if (GET_CODE (src) == COMPARE)
4252	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4253      else
4254	op0 = src, op1 = const0_rtx;
4255
4256      /* Simplify our comparison, if possible.  */
4257      new_code = simplify_comparison (old_code, &op0, &op1);
4258
4259#ifdef EXTRA_CC_MODES
4260      /* If this machine has CC modes other than CCmode, check to see if we
4261	 need to use a different CC mode here.  */
4262      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4263#endif /* EXTRA_CC_MODES */
4264
4265#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4266      /* If the mode changed, we have to change SET_DEST, the mode in the
4267	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
4268	 a hard register, just build new versions with the proper mode.  If it
4269	 is a pseudo, we lose unless it is the only time we set the pseudo, in
4270	 which case we can safely change its mode.  */
4271      if (compare_mode != GET_MODE (dest))
4272	{
4273	  int regno = REGNO (dest);
4274	  rtx new_dest = gen_rtx (REG, compare_mode, regno);
4275
4276	  if (regno < FIRST_PSEUDO_REGISTER
4277	      || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4278	    {
4279	      if (regno >= FIRST_PSEUDO_REGISTER)
4280		SUBST (regno_reg_rtx[regno], new_dest);
4281
4282	      SUBST (SET_DEST (x), new_dest);
4283	      SUBST (XEXP (*cc_use, 0), new_dest);
4284	      other_changed = 1;
4285
4286	      dest = new_dest;
4287	    }
4288	}
4289#endif
4290
4291      /* If the code changed, we have to build a new comparison in
4292	 undobuf.other_insn.  */
4293      if (new_code != old_code)
4294	{
4295	  unsigned HOST_WIDE_INT mask;
4296
4297	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4298					   dest, const0_rtx));
4299
4300	  /* If the only change we made was to change an EQ into an NE or
4301	     vice versa, OP0 has only one bit that might be nonzero, and OP1
4302	     is zero, check if changing the user of the condition code will
4303	     produce a valid insn.  If it won't, we can keep the original code
4304	     in that insn by surrounding our operation with an XOR.  */
4305
4306	  if (((old_code == NE && new_code == EQ)
4307	       || (old_code == EQ && new_code == NE))
4308	      && ! other_changed && op1 == const0_rtx
4309	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4310	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4311	    {
4312	      rtx pat = PATTERN (other_insn), note = 0;
4313	      int scratches;
4314
4315	      if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
4316		   && ! check_asm_operands (pat)))
4317		{
4318		  PUT_CODE (*cc_use, old_code);
4319		  other_insn = 0;
4320
4321		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4322		}
4323	    }
4324
4325	  other_changed = 1;
4326	}
4327
4328      if (other_changed)
4329	undobuf.other_insn = other_insn;
4330
4331#ifdef HAVE_cc0
4332      /* If we are now comparing against zero, change our source if
4333	 needed.  If we do not use cc0, we always have a COMPARE.  */
4334      if (op1 == const0_rtx && dest == cc0_rtx)
4335	{
4336	  SUBST (SET_SRC (x), op0);
4337	  src = op0;
4338	}
4339      else
4340#endif
4341
4342      /* Otherwise, if we didn't previously have a COMPARE in the
4343	 correct mode, we need one.  */
4344      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4345	{
4346	  SUBST (SET_SRC (x),
4347		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4348	  src = SET_SRC (x);
4349	}
4350      else
4351	{
4352	  /* Otherwise, update the COMPARE if needed.  */
4353	  SUBST (XEXP (src, 0), op0);
4354	  SUBST (XEXP (src, 1), op1);
4355	}
4356    }
4357  else
4358    {
4359      /* Get SET_SRC in a form where we have placed back any
4360	 compound expressions.  Then do the checks below.  */
4361      src = make_compound_operation (src, SET);
4362      SUBST (SET_SRC (x), src);
4363    }
4364
4365  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4366     and X being a REG or (subreg (reg)), we may be able to convert this to
4367     (set (subreg:m2 x) (op)).
4368
4369     We can always do this if M1 is narrower than M2 because that means that
4370     we only care about the low bits of the result.
4371
4372     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4373     perform a narrower operation than requested since the high-order bits will
4374     be undefined.  On machines where it is defined, this transformation is safe
4375     as long as M1 and M2 have the same number of words.  */
4376
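  /* For example, (set (reg:QI D) (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI (reg:QI D) 0) (plus:SI A B)); only the low byte of
     the sum is needed, so the PLUS may as well be done in SImode.  */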
4377  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4378      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4379      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4380	   / UNITS_PER_WORD)
4381	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4382	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4383#ifndef WORD_REGISTER_OPERATIONS
4384      && (GET_MODE_SIZE (GET_MODE (src))
4385	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4386#endif
4387#ifdef CLASS_CANNOT_CHANGE_SIZE
4388      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4389	    && (TEST_HARD_REG_BIT
4390		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4391		 REGNO (dest)))
4392	    && (GET_MODE_SIZE (GET_MODE (src))
4393		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4394#endif
4395      && (GET_CODE (dest) == REG
4396	  || (GET_CODE (dest) == SUBREG
4397	      && GET_CODE (SUBREG_REG (dest)) == REG)))
4398    {
4399      SUBST (SET_DEST (x),
4400	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4401				      dest));
4402      SUBST (SET_SRC (x), SUBREG_REG (src));
4403
4404      src = SET_SRC (x), dest = SET_DEST (x);
4405    }
4406
4407#ifdef LOAD_EXTEND_OP
4408  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4409     would require a paradoxical subreg.  Replace the subreg with the
4410     extension given by LOAD_EXTEND_OP to avoid the otherwise-required reload.  */
4411
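  /* For example, on a machine where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND,
     (set FOO (subreg:SI (mem:QI BAR) 0)) becomes
     (set FOO (zero_extend:SI (mem:QI BAR))), which matches what the load
     instruction does anyway.  */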
4412  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4413      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4414      && SUBREG_WORD (src) == 0
4415      && (GET_MODE_SIZE (GET_MODE (src))
4416	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4417      && GET_CODE (SUBREG_REG (src)) == MEM)
4418    {
4419      SUBST (SET_SRC (x),
4420	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4421			      GET_MODE (src), XEXP (src, 0)));
4422
4423      src = SET_SRC (x);
4424    }
4425#endif
4426
4427  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4428     are comparing an item known to be 0 or -1 against 0, use a logical
4429     operation instead. Check for one of the arms being an IOR of the other
4430     arm with some value.  We compute three terms to be IOR'ed together.  In
4431     practice, at most two will be nonzero.  Then we do the IOR's.  */
4432
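  /* For example, if A is known to be 0 or -1, (if_then_else (ne A 0) B C)
     becomes (ior (ior 0 (and A B)) (and (not A) C)); when A is -1 the ANDs
     select B, and when A is 0 they select C.  */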
4433  if (GET_CODE (dest) != PC
4434      && GET_CODE (src) == IF_THEN_ELSE
4435      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4436      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4437      && XEXP (XEXP (src, 0), 1) == const0_rtx
4438      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4439#ifdef HAVE_conditional_move
4440      && ! can_conditionally_move_p (GET_MODE (src))
4441#endif
4442      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4443			       GET_MODE (XEXP (XEXP (src, 0), 0)))
4444	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4445      && ! side_effects_p (src))
4446    {
4447      rtx true = (GET_CODE (XEXP (src, 0)) == NE
4448		      ? XEXP (src, 1) : XEXP (src, 2));
4449      rtx false = (GET_CODE (XEXP (src, 0)) == NE
4450		   ? XEXP (src, 2) : XEXP (src, 1));
4451      rtx term1 = const0_rtx, term2, term3;
4452
4453      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4454	term1 = false, true = XEXP (true, 1), false = const0_rtx;
4455      else if (GET_CODE (true) == IOR
4456	       && rtx_equal_p (XEXP (true, 1), false))
4457	term1 = false, true = XEXP (true, 0), false = const0_rtx;
4458      else if (GET_CODE (false) == IOR
4459	       && rtx_equal_p (XEXP (false, 0), true))
4460	term1 = true, false = XEXP (false, 1), true = const0_rtx;
4461      else if (GET_CODE (false) == IOR
4462	       && rtx_equal_p (XEXP (false, 1), true))
4463	term1 = true, false = XEXP (false, 0), true = const0_rtx;
4464
4465      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4466      term3 = gen_binary (AND, GET_MODE (src),
4467			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4468				     XEXP (XEXP (src, 0), 0)),
4469			  false);
4470
4471      SUBST (SET_SRC (x),
4472	     gen_binary (IOR, GET_MODE (src),
4473			 gen_binary (IOR, GET_MODE (src), term1, term2),
4474			 term3));
4475
4476      src = SET_SRC (x);
4477    }
4478
4479  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4480     whole thing fail.  */
4481  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4482    return src;
4483  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4484    return dest;
4485  else
4486    /* Convert this into a field assignment operation, if possible.  */
4487    return make_field_assignment (x);
4488}
4489
4490/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4491   result.  LAST is nonzero if this is the last retry.  */
4492
4493static rtx
4494simplify_logical (x, last)
4495     rtx x;
4496     int last;
4497{
4498  enum machine_mode mode = GET_MODE (x);
4499  rtx op0 = XEXP (x, 0);
4500  rtx op1 = XEXP (x, 1);
4501
4502  switch (GET_CODE (x))
4503    {
4504    case AND:
4505      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4506	 insn (and may simplify more).  */
4507      if (GET_CODE (op0) == XOR
4508	  && rtx_equal_p (XEXP (op0, 0), op1)
4509	  && ! side_effects_p (op1))
4510	x = gen_binary (AND, mode,
4511			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4512
4513      if (GET_CODE (op0) == XOR
4514	  && rtx_equal_p (XEXP (op0, 1), op1)
4515	  && ! side_effects_p (op1))
4516	x = gen_binary (AND, mode,
4517			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4518
4519      /* Similarly for (~ (A ^ B)) & A.  */
4520      if (GET_CODE (op0) == NOT
4521	  && GET_CODE (XEXP (op0, 0)) == XOR
4522	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4523	  && ! side_effects_p (op1))
4524	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4525
4526      if (GET_CODE (op0) == NOT
4527	  && GET_CODE (XEXP (op0, 0)) == XOR
4528	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4529	  && ! side_effects_p (op1))
4530	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4531
4532      if (GET_CODE (op1) == CONST_INT)
4533	{
4534	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4535
4536	  /* If we have (ior (and X C1) C2) and the next restart would be
4537	     the last, simplify this by making C1 as small as possible
4538	     and then exit.  */
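	  /* For example, (ior (and X 0xff) 0x0f) becomes
	     (ior (and X 0xf0) 0x0f); the low four bits are forced to 1 by
	     the IOR, so the AND need not preserve them.  */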
4539	  if (last
4540	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4541	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
4542	      && GET_CODE (op1) == CONST_INT)
4543	    return gen_binary (IOR, mode,
4544			       gen_binary (AND, mode, XEXP (op0, 0),
4545					   GEN_INT (INTVAL (XEXP (op0, 1))
4546						    & ~ INTVAL (op1))), op1);
4547
4548	  if (GET_CODE (x) != AND)
4549	    return x;
4550
4551	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4552	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
4553	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4554	}
4555
4556      /* Convert (A | B) & A to A.  */
4557      if (GET_CODE (op0) == IOR
4558	  && (rtx_equal_p (XEXP (op0, 0), op1)
4559	      || rtx_equal_p (XEXP (op0, 1), op1))
4560	  && ! side_effects_p (XEXP (op0, 0))
4561	  && ! side_effects_p (XEXP (op0, 1)))
4562	return op1;
4563
4564      /* In the following group of tests (and those in case IOR below),
4565	 we start with some combination of logical operations and apply
4566	 the distributive law followed by the inverse distributive law.
4567	 Most of the time, this results in no change.  However, if some of
4568	 the operands are the same or inverses of each other, simplifications
4569	 will result.
4570
4571	 For example, (and (ior A B) (not B)) can occur as the result of
4572	 expanding a bit field assignment.  When we apply the distributive
4573	 law to this, we get (ior (and A (not B)) (and B (not B))),
4574	 which then simplifies to (and A (not B)).
4575
4576	 If we have (and (ior A B) C), apply the distributive law and then
4577	 the inverse distributive law to see if things simplify.  */
4578
4579      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4580	{
4581	  x = apply_distributive_law
4582	    (gen_binary (GET_CODE (op0), mode,
4583			 gen_binary (AND, mode, XEXP (op0, 0), op1),
4584			 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4585	  if (GET_CODE (x) != AND)
4586	    return x;
4587	}
4588
4589      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4590	return apply_distributive_law
4591	  (gen_binary (GET_CODE (op1), mode,
4592		       gen_binary (AND, mode, XEXP (op1, 0), op0),
4593		       gen_binary (AND, mode, XEXP (op1, 1), op0)));
4594
4595      /* Similarly, taking advantage of the fact that
4596	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
4597
4598      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4599	return apply_distributive_law
4600	  (gen_binary (XOR, mode,
4601		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4602		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4603
4604      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4605	return apply_distributive_law
4606	  (gen_binary (XOR, mode,
4607		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4608		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4609      break;
4610
4611    case IOR:
4612      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
4613      if (GET_CODE (op1) == CONST_INT
4614	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4615	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4616	return op1;
4617
4618      /* Convert (A & B) | A to A.  */
4619      if (GET_CODE (op0) == AND
4620	  && (rtx_equal_p (XEXP (op0, 0), op1)
4621	      || rtx_equal_p (XEXP (op0, 1), op1))
4622	  && ! side_effects_p (XEXP (op0, 0))
4623	  && ! side_effects_p (XEXP (op0, 1)))
4624	return op1;
4625
4626      /* If we have (ior (and A B) C), apply the distributive law and then
4627	 the inverse distributive law to see if things simplify.  */
4628
4629      if (GET_CODE (op0) == AND)
4630	{
4631	  x = apply_distributive_law
4632	    (gen_binary (AND, mode,
4633			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4634			 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
4635
4636	  if (GET_CODE (x) != IOR)
4637	    return x;
4638	}
4639
4640      if (GET_CODE (op1) == AND)
4641	{
4642	  x = apply_distributive_law
4643	    (gen_binary (AND, mode,
4644			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4645			 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
4646
4647	  if (GET_CODE (x) != IOR)
4648	    return x;
4649	}
4650
4651      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4652	 mode size to (rotate A CX).  */
4653
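      /* For example, in SImode, (ior (ashift X 8) (lshiftrt X 24)) moves
	 each bit of X up 8 places and wraps the top 8 bits around to the
	 bottom, which is exactly (rotate X 8).  */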
4654      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4655	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4656	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4657	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
4658	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
4659	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
4660	      == GET_MODE_BITSIZE (mode)))
4661	return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4662			(GET_CODE (op0) == ASHIFT
4663			 ? XEXP (op0, 1) : XEXP (op1, 1)));
4664
4665      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4666	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
4667	 does not affect any of the bits in OP1, it can really be done
4668	 as a PLUS and we can associate.  We do this by seeing if OP1
4669	 can be safely shifted left C bits.  */
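      /* For example, (ior (ashiftrt (plus Y 16) 2) 1) can become
	 (ashiftrt (plus Y 20) 2) when bit 2 of (plus Y 16) is known to be
	 zero; OR-ing in 1 after the shift is then the same as OR-ing in 4
	 before it.  */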
4670      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4671	  && GET_CODE (XEXP (op0, 0)) == PLUS
4672	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4673	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
4674	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4675	{
4676	  int count = INTVAL (XEXP (op0, 1));
4677	  HOST_WIDE_INT mask = INTVAL (op1) << count;
4678
4679	  if (mask >> count == INTVAL (op1)
4680	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4681	    {
4682	      SUBST (XEXP (XEXP (op0, 0), 1),
4683		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4684	      return op0;
4685	    }
4686	}
4687      break;
4688
4689    case XOR:
4690      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4691	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4692	 (NOT y).  */
4693      {
4694	int num_negated = 0;
4695
4696	if (GET_CODE (op0) == NOT)
4697	  num_negated++, op0 = XEXP (op0, 0);
4698	if (GET_CODE (op1) == NOT)
4699	  num_negated++, op1 = XEXP (op1, 0);
4700
4701	if (num_negated == 2)
4702	  {
4703	    SUBST (XEXP (x, 0), op0);
4704	    SUBST (XEXP (x, 1), op1);
4705	  }
4706	else if (num_negated == 1)
4707	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
4708      }
4709
4710      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
4711	 correspond to a machine insn or result in further simplifications
4712	 if B is a constant.  */
4713
4714      if (GET_CODE (op0) == AND
4715	  && rtx_equal_p (XEXP (op0, 1), op1)
4716	  && ! side_effects_p (op1))
4717	return gen_binary (AND, mode,
4718			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
4719			   op1);
4720
4721      else if (GET_CODE (op0) == AND
4722	       && rtx_equal_p (XEXP (op0, 0), op1)
4723	       && ! side_effects_p (op1))
4724	return gen_binary (AND, mode,
4725			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
4726			   op1);
4727
4728#if STORE_FLAG_VALUE == 1
4729      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4730	 comparison.  */
4731      if (op1 == const1_rtx
4732	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4733	  && reversible_comparison_p (op0))
4734	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4735				mode, XEXP (op0, 0), XEXP (op0, 1));
4736
4737      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4738	 is (lt foo (const_int 0)), so we can perform the above
4739	 simplification.  */
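      /* For example, in SImode, (xor (lshiftrt X 31) 1) tests the
	 complement of X's sign bit, which is (ge X 0).  */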
4740
4741      if (op1 == const1_rtx
4742	  && GET_CODE (op0) == LSHIFTRT
4743	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
4744	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4745	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
4746#endif
4747
4748      /* (xor (comparison foo bar) (const_int sign-bit))
4749	 when STORE_FLAG_VALUE is the sign bit.  */
4750      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4751	  && (STORE_FLAG_VALUE
4752	      == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4753	  && op1 == const_true_rtx
4754	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4755	  && reversible_comparison_p (op0))
4756	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4757				mode, XEXP (op0, 0), XEXP (op0, 1));
4758      break;
4759    }
4760
4761  return x;
4762}
4763
4764/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4765   operations" because they can be replaced with two more basic operations.
4766   ZERO_EXTEND is also considered "compound" because it can be replaced with
4767   an AND operation, which is simpler, though only one operation.
4768
4769   The function expand_compound_operation is called with an rtx expression
4770   and will convert it to the appropriate shifts and AND operations,
4771   simplifying at each stage.
4772
4773   The function make_compound_operation is called to convert an expression
4774   consisting of shifts and ANDs into the equivalent compound expression.
4775   It is the inverse of this function, loosely speaking.  */
4776
4777static rtx
4778expand_compound_operation (x)
4779     rtx x;
4780{
4781  int pos = 0, len;
4782  int unsignedp = 0;
4783  int modewidth;
4784  rtx tem;
4785
4786  switch (GET_CODE (x))
4787    {
4788    case ZERO_EXTEND:
4789      unsignedp = 1;
4790    case SIGN_EXTEND:
4791      /* We can't necessarily use a const_int for a multiword mode;
4792	 it depends on implicitly extending the value.
4793	 Since we don't know the right way to extend it,
4794	 we can't tell whether the implicit way is right.
4795
4796	 Even for a mode that is no wider than a const_int,
4797	 we can't win, because we need to sign extend one of its bits through
4798	 the rest of it, and we don't know which bit.  */
4799      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4800	return x;
4801
4802      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4803	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
4804	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4805	 reloaded.  If not for that, MEMs would very rarely be safe.
4806
4807	 Reject MODEs bigger than a word, because we might not be able
4808	 to reference a two-register group starting with an arbitrary register
4809	 (and currently gen_lowpart might crash for a SUBREG).  */
4810
4811      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
4812	return x;
4813
4814      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4815      /* If the inner object has VOIDmode (the only way this can happen
4816	 is if it is an ASM_OPERANDS), we can't do anything since we don't
4817	 know how much masking to do.  */
4818      if (len == 0)
4819	return x;
4820
4821      break;
4822
4823    case ZERO_EXTRACT:
4824      unsignedp = 1;
4825    case SIGN_EXTRACT:
4826      /* If the operand is a CLOBBER, just return it.  */
4827      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4828	return XEXP (x, 0);
4829
4830      if (GET_CODE (XEXP (x, 1)) != CONST_INT
4831	  || GET_CODE (XEXP (x, 2)) != CONST_INT
4832	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
4833	return x;
4834
4835      len = INTVAL (XEXP (x, 1));
4836      pos = INTVAL (XEXP (x, 2));
4837
4838      /* If this goes outside the object being extracted, replace the object
4839	 with a (use (mem ...)) construct that only combine understands
4840	 and is used only for this purpose.  */
4841      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4842	SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4843
4844      if (BITS_BIG_ENDIAN)
4845	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4846
4847      break;
4848
4849    default:
4850      return x;
4851    }
4852
4853  /* If we reach here, we want to return a pair of shifts.  The inner
4854     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
4855     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
4856     logical depending on the value of UNSIGNEDP.
4857
4858     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4859     converted into an AND of a shift.
4860
4861     We must check for the case where the left shift would have a negative
4862     count.  This can happen in a case like (x >> 31) & 255 on machines
4863     that can't shift by a constant.  On those machines, we would first
4864     combine the shift with the AND to produce a variable-position
4865     extraction.  Then the constant of 31 would be substituted in to produce
4866     such a position.  */
4867
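  /* For example, (sign_extend:SI (reg:QI R)) becomes a left shift of 24
     followed by an arithmetic right shift of 24 in SImode, while the
     zero-extending forms end up as an AND with 255 after the
     simplifications below.  */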
4868  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4869  if (modewidth >= pos + len)
4870    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4871				GET_MODE (x),
4872				simplify_shift_const (NULL_RTX, ASHIFT,
4873						      GET_MODE (x),
4874						      XEXP (x, 0),
4875						      modewidth - pos - len),
4876				modewidth - len);
4877
4878  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4879    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4880				  simplify_shift_const (NULL_RTX, LSHIFTRT,
4881							GET_MODE (x),
4882							XEXP (x, 0), pos),
4883				  ((HOST_WIDE_INT) 1 << len) - 1);
4884  else
4885    /* Any other cases we can't handle.  */
4886    return x;
4887
4888
4889  /* If we couldn't do this for some reason, return the original
4890     expression.  */
4891  if (GET_CODE (tem) == CLOBBER)
4892    return x;
4893
4894  return tem;
4895}
4896
4897/* X is a SET which contains an assignment of one object into
4898   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4899   or certain SUBREGS). If possible, convert it into a series of
4900   logical operations.
4901
4902   We half-heartedly support variable positions, but do not at all
4903   support variable lengths.  */
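
/* For example, (set (zero_extract:SI X 8 4) Y) is rewritten as
   (set X (ior (and (not (ashift 255 4)) X)
	       (ashift (and Y 255) 4))),
   i.e. bits 4 through 11 of X are cleared and the low byte of Y is
   shifted into their place.  */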
4904
4905static rtx
4906expand_field_assignment (x)
4907     rtx x;
4908{
4909  rtx inner;
4910  rtx pos;			/* Always counts from low bit. */
4911  int len;
4912  rtx mask;
4913  enum machine_mode compute_mode;
4914
4915  /* Loop until we find something we can't simplify.  */
4916  while (1)
4917    {
4918      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4919	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4920	{
4921	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4922	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4923	  pos = const0_rtx;
4924	}
4925      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4926	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4927	{
4928	  inner = XEXP (SET_DEST (x), 0);
4929	  len = INTVAL (XEXP (SET_DEST (x), 1));
4930	  pos = XEXP (SET_DEST (x), 2);
4931
4932	  /* If the position is constant and the field extends beyond the
4933	     width of INNER, surround INNER with a USE to indicate this.  */
4934	  if (GET_CODE (pos) == CONST_INT
4935	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4936	    inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4937
4938	  if (BITS_BIG_ENDIAN)
4939	    {
4940	      if (GET_CODE (pos) == CONST_INT)
4941		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4942			       - INTVAL (pos));
4943	      else if (GET_CODE (pos) == MINUS
4944		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
4945		       && (INTVAL (XEXP (pos, 1))
4946			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4947		/* If position is ADJUST - X, new position is X.  */
4948		pos = XEXP (pos, 0);
4949	      else
4950		pos = gen_binary (MINUS, GET_MODE (pos),
4951				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4952					   - len),
4953				  pos);
4954	    }
4955	}
4956
4957      /* A SUBREG between two modes that occupy the same number of words
4958	 can be done by moving the SUBREG to the source.  */
4959      else if (GET_CODE (SET_DEST (x)) == SUBREG
4960	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4961		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4962		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4963			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4964	{
4965	  x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4966		       gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4967						SET_SRC (x)));
4968	  continue;
4969	}
4970      else
4971	break;
4972
4973      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4974	inner = SUBREG_REG (inner);
4975
4976      compute_mode = GET_MODE (inner);
4977
4978      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
4979      if (len < HOST_BITS_PER_WIDE_INT)
4980	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4981      else
4982	break;
4983
4984      /* Now compute the equivalent expression.  Make a copy of INNER
4985	 for the SET_DEST in case it is a MEM into which we will substitute;
4986	 we don't want shared RTL in that case.  */
4987      x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4988		   gen_binary (IOR, compute_mode,
4989			       gen_binary (AND, compute_mode,
4990					   gen_unary (NOT, compute_mode,
4991						      compute_mode,
4992						      gen_binary (ASHIFT,
4993								  compute_mode,
4994								  mask, pos)),
4995					   inner),
4996			       gen_binary (ASHIFT, compute_mode,
4997					   gen_binary (AND, compute_mode,
4998						       gen_lowpart_for_combine
4999						       (compute_mode,
5000							SET_SRC (x)),
5001						       mask),
5002					   pos)));
5003    }
5004
5005  return x;
5006}
5007
5008/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5009   it is an RTX that represents a variable starting position; otherwise,
5010   POS is the (constant) starting bit position (counted from the LSB).
5011
5012   INNER may be a USE.  This will occur when we started with a bitfield
5013   that went outside the boundary of the object in memory, which is
5014   allowed on most machines.  To isolate this case, we produce a USE
5015   whose mode is wide enough and surround the MEM with it.  The only
5016   code that understands the USE is this routine.  If it is not removed,
5017   it will cause the resulting insn not to match.
5018
5019   UNSIGNEDP is non-zero for an unsigned reference and zero for a
5020   signed reference.
5021
5022   IN_DEST is non-zero if this is a reference in the destination of a
5023   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
5024   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5025   be used.
5026
5027   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
5028   ZERO_EXTRACT should be built even for bits starting at bit 0.
5029
5030   MODE is the desired mode of the result (if IN_DEST == 0).  */
5031
5032static rtx
5033make_extraction (mode, inner, pos, pos_rtx, len,
5034		 unsignedp, in_dest, in_compare)
5035     enum machine_mode mode;
5036     rtx inner;
5037     int pos;
5038     rtx pos_rtx;
5039     int len;
5040     int unsignedp;
5041     int in_dest, in_compare;
5042{
5043  /* This mode describes the size of the storage area
5044     to fetch the overall value from.  Within that, we
5045     ignore the POS lowest bits, etc.  */
5046  enum machine_mode is_mode = GET_MODE (inner);
5047  enum machine_mode inner_mode;
5048  enum machine_mode wanted_mem_mode = byte_mode;
5049  enum machine_mode pos_mode = word_mode;
5050  enum machine_mode extraction_mode = word_mode;
5051  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5052  int spans_byte = 0;
5053  rtx new = 0;
5054  rtx orig_pos_rtx = pos_rtx;
5055  int orig_pos;
5056
5057  /* Get some information about INNER and get the innermost object.  */
5058  if (GET_CODE (inner) == USE)
5059    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5060    /* We don't need to adjust the position because we set up the USE
5061       to pretend that it was a full-word object.  */
5062    spans_byte = 1, inner = XEXP (inner, 0);
5063  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5064    {
5065      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5066	 consider just the QI as the memory to extract from.
5067	 The subreg adds or removes high bits; its mode is
5068	 irrelevant to the meaning of this extraction,
5069	 since POS and LEN count from the lsb.  */
5070      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5071	is_mode = GET_MODE (SUBREG_REG (inner));
5072      inner = SUBREG_REG (inner);
5073    }
5074
5075  inner_mode = GET_MODE (inner);
5076
5077  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5078    pos = INTVAL (pos_rtx), pos_rtx = 0;
5079
5080  /* See if this can be done without an extraction.  We never can if the
5081     width of the field is not the same as that of some integer mode. For
5082     registers, we can only avoid the extraction if the position is at the
5083     low-order bit and this is either not in the destination or we have the
5084     appropriate STRICT_LOW_PART operation available.
5085
5086     For MEM, we can avoid an extract if the field starts on an appropriate
5087     boundary and we can change the mode of the memory reference.  However,
5088     we cannot directly access the MEM if we have a USE and the underlying
5089     MEM is not TMODE.  This combination means that MEM was being used in a
5090     context where bits outside its mode were being referenced; that is only
5091     valid in bit-field insns.  */
5092
5093  if (tmode != BLKmode
5094      && ! (spans_byte && inner_mode != tmode)
5095      && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
5096	   && (! in_dest
5097	       || (GET_CODE (inner) == REG
5098		   && (movstrict_optab->handlers[(int) tmode].insn_code
5099		       != CODE_FOR_nothing))))
5100	  || (GET_CODE (inner) == MEM && pos_rtx == 0
5101	      && (pos
5102		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5103		     : BITS_PER_UNIT)) == 0
5104	      /* We can't do this if we are widening INNER_MODE (it
5105		 may not be aligned, for one thing).  */
5106	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5107	      && (inner_mode == tmode
5108		  || (! mode_dependent_address_p (XEXP (inner, 0))
5109		      && ! MEM_VOLATILE_P (inner))))))
5110    {
5111      /* If INNER is a MEM, make a new MEM that encompasses just the desired
5112	 field.  If the original and current mode are the same, we need not
5113	 adjust the offset.  Otherwise, we adjust it if bytes are big-endian.
5114
5115	 If INNER is not a MEM, get a piece consisting of just the field
5116	 of interest (in this case POS must be 0).  */
5117
5118      if (GET_CODE (inner) == MEM)
5119	{
5120	  int offset;
5121	  /* POS counts from lsb, but make OFFSET count in memory order.  */
5122	  if (BYTES_BIG_ENDIAN)
5123	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5124	  else
5125	    offset = pos / BITS_PER_UNIT;
5126
5127	  new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5128	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5129	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5130	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5131	}
5132      else if (GET_CODE (inner) == REG)
5133	{
5134	  /* We can't call gen_lowpart_for_combine here since we always want
5135	     a SUBREG and it would sometimes return a new hard register.  */
5136	  if (tmode != inner_mode)
5137	    new = gen_rtx (SUBREG, tmode, inner,
5138			   (WORDS_BIG_ENDIAN
5139			    && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5140			    ? ((GET_MODE_SIZE (inner_mode)
5141				- GET_MODE_SIZE (tmode))
5142			       / UNITS_PER_WORD)
5143			    : 0));
5144	  else
5145	    new = inner;
5146	}
5147      else
5148	new = force_to_mode (inner, tmode,
5149			     len >= HOST_BITS_PER_WIDE_INT
5150			     ? GET_MODE_MASK (tmode)
5151			     : ((HOST_WIDE_INT) 1 << len) - 1,
5152			     NULL_RTX, 0);
5153
5154      /* If this extraction is going into the destination of a SET,
5155	 make a STRICT_LOW_PART unless we made a MEM.  */
5156
5157      if (in_dest)
5158	return (GET_CODE (new) == MEM ? new
5159		: (GET_CODE (new) != SUBREG
5160		   ? gen_rtx (CLOBBER, tmode, const0_rtx)
5161		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5162
5163      /* Otherwise, sign- or zero-extend unless we already are in the
5164	 proper mode.  */
5165
5166      return (mode == tmode ? new
5167	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5168				 mode, new));
5169    }
5170
5171  /* Unless this is a COMPARE or we have a funny memory reference,
5172     don't do anything with zero-extending field extracts starting at
5173     the low-order bit since they are simple AND operations.  */
5174  if (pos_rtx == 0 && pos == 0 && ! in_dest
5175      && ! in_compare && ! spans_byte && unsignedp)
5176    return 0;
5177
5178  /* Unless we are allowed to span bytes, reject this if we would be
5179     spanning bytes or if the position is not a constant and the length
5180     is not 1.  In all other cases, we would only be going outside
5181     our object in cases when an original shift would have been
5182     undefined.  */
5183  if (! spans_byte
5184      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5185	  || (pos_rtx != 0 && len != 1)))
5186    return 0;
5187
5188  /* Get the mode to use should INNER be a MEM, the mode for the position,
5189     and the mode for the result.  */
5190#ifdef HAVE_insv
5191  if (in_dest)
5192    {
5193      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5194      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5195      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5196    }
5197#endif
5198
5199#ifdef HAVE_extzv
5200  if (! in_dest && unsignedp)
5201    {
5202      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5203      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5204      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5205    }
5206#endif
5207
5208#ifdef HAVE_extv
5209  if (! in_dest && ! unsignedp)
5210    {
5211      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5212      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5213      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5214    }
5215#endif
5216
5217  /* Never narrow an object, since that might not be safe.  */
5218
5219  if (mode != VOIDmode
5220      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5221    extraction_mode = mode;
5222
5223  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5224      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5225    pos_mode = GET_MODE (pos_rtx);
5226
5227  /* If this is not from memory or we have to change the mode of memory and
5228     cannot, the desired mode is EXTRACTION_MODE.  */
5229  if (GET_CODE (inner) != MEM
5230      || (inner_mode != wanted_mem_mode
5231	  && (mode_dependent_address_p (XEXP (inner, 0))
5232	      || MEM_VOLATILE_P (inner))))
5233    wanted_mem_mode = extraction_mode;
5234
5235  orig_pos = pos;
5236
5237  if (BITS_BIG_ENDIAN)
5238    {
5239      /* If position is constant, compute new position.  Otherwise,
5240	 build subtraction.  */
5241      if (pos_rtx == 0)
5242	pos = (MAX (GET_MODE_BITSIZE (is_mode),
5243		    GET_MODE_BITSIZE (wanted_mem_mode))
5244	       - len - pos);
5245      else
5246	pos_rtx
5247	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5248			     GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5249					   GET_MODE_BITSIZE (wanted_mem_mode))
5250				      - len),
5251			     pos_rtx);
5252    }
5253
5254  /* If INNER has a wider mode, make it smaller.  If this is a constant
5255     extract, try to adjust the byte to point to the byte containing
5256     the value.  */
5257  if (wanted_mem_mode != VOIDmode
5258      && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5259      && ((GET_CODE (inner) == MEM
5260	   && (inner_mode == wanted_mem_mode
5261	       || (! mode_dependent_address_p (XEXP (inner, 0))
5262		   && ! MEM_VOLATILE_P (inner))))))
5263    {
5264      int offset = 0;
5265
5266      /* The computations below will be correct if the machine is big
5267	 endian in both bits and bytes or little endian in bits and bytes.
5268	 If it is mixed, we must adjust.  */
5269
5270      /* If bytes are big endian and we had a paradoxical SUBREG, we must
5271	 adjust OFFSET to compensate. */
5272      if (BYTES_BIG_ENDIAN
5273	  && ! spans_byte
5274	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5275	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5276
5277      /* If this is a constant position, we can move to the desired byte.  */
5278      if (pos_rtx == 0)
5279	{
5280	  offset += pos / BITS_PER_UNIT;
5281	  pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5282	}
5283
5284      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5285	  && ! spans_byte
5286	  && is_mode != wanted_mem_mode)
5287	offset = (GET_MODE_SIZE (is_mode)
5288		  - GET_MODE_SIZE (wanted_mem_mode) - offset);
5289
5290      if (offset != 0 || inner_mode != wanted_mem_mode)
5291	{
5292	  rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5293				plus_constant (XEXP (inner, 0), offset));
5294	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5295	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5296	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5297	  inner = newmem;
5298	}
5299    }
5300
5301  /* If INNER is not memory, we can always get it into the proper mode. */
5302  else if (GET_CODE (inner) != MEM)
5303    inner = force_to_mode (inner, extraction_mode,
5304			   pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5305			   ? GET_MODE_MASK (extraction_mode)
5306			   : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5307			   NULL_RTX, 0);
5308
5309  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
5310     have to zero extend.  Otherwise, we can just use a SUBREG.  */
5311  if (pos_rtx != 0
5312      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5313    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5314  else if (pos_rtx != 0
5315	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5316    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5317
5318  /* Make POS_RTX unless we already have it and it is correct.  If we don't
5319     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5320     be a CONST_INT. */
5321  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5322    pos_rtx = orig_pos_rtx;
5323
5324  else if (pos_rtx == 0)
5325    pos_rtx = GEN_INT (pos);
5326
5327  /* Make the required operation.  See if we can use existing rtx.  */
5328  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5329			 extraction_mode, inner, GEN_INT (len), pos_rtx);
5330  if (! in_dest)
5331    new = gen_lowpart_for_combine (mode, new);
5332
5333  return new;
5334}
5335
5336/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5337   with any other operations in X.  Return X without that shift if so.  */
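
/* For example, with COUNT == 3, (plus (ashift X 3) 8) yields (plus X 1),
   since shifting (plus X 1) left by 3 bits reproduces the original and
   the low three bits of the constant 8 are zero.  */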
5338
5339static rtx
5340extract_left_shift (x, count)
5341     rtx x;
5342     int count;
5343{
5344  enum rtx_code code = GET_CODE (x);
5345  enum machine_mode mode = GET_MODE (x);
5346  rtx tem;
5347
5348  switch (code)
5349    {
5350    case ASHIFT:
5351      /* This is the shift itself.  If it is wide enough, we will return
5352	 either the value being shifted if the shift count is equal to
5353	 COUNT or a shift for the difference.  */
5354      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5355	  && INTVAL (XEXP (x, 1)) >= count)
5356	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5357				     INTVAL (XEXP (x, 1)) - count);
5358      break;
5359
5360    case NEG:  case NOT:
5361      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5362	return gen_unary (code, mode, mode, tem);
5363
5364      break;
5365
5366    case PLUS:  case IOR:  case XOR:  case AND:
5367      /* If we can safely shift this constant and we find the inner shift,
5368	 make a new operation.  */
5369      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5370	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5371	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5372	return gen_binary (code, mode, tem,
5373			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5374
5375      break;
5376    }
5377
5378  return 0;
5379}
5380
5381/* Look at the expression rooted at X.  Look for expressions
5382   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5383   Form these expressions.
5384
5385   Return the new rtx, usually just X.
5386
5387   Also, for machines like the Vax that don't have logical shift insns,
5388   try to convert logical to arithmetic shift operations in cases where
5389   they are equivalent.  This undoes the canonicalizations to logical
5390   shifts done elsewhere.
5391
5392   We try, as much as possible, to re-use rtl expressions to save memory.
5393
5394   IN_CODE says what kind of expression we are processing.  Normally, it is
5395   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
5396   being kludges), it is MEM.  When processing the arguments of a comparison
5397   or a COMPARE against zero, it is COMPARE.  */
5398
5399static rtx
5400make_compound_operation (x, in_code)
5401     rtx x;
5402     enum rtx_code in_code;
5403{
5404  enum rtx_code code = GET_CODE (x);
5405  enum machine_mode mode = GET_MODE (x);
5406  int mode_width = GET_MODE_BITSIZE (mode);
5407  rtx rhs, lhs;
5408  enum rtx_code next_code;
5409  int i;
5410  rtx new = 0;
5411  rtx tem;
5412  char *fmt;
5413
5414  /* Select the code to be used in recursive calls.  Once we are inside an
5415     address, we stay there.  If we have a comparison, set to COMPARE,
5416     but once inside, go back to our default of SET.  */
5417
5418  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5419	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5420		  && XEXP (x, 1) == const0_rtx) ? COMPARE
5421	       : in_code == COMPARE ? SET : in_code);
5422
5423  /* Process depending on the code of this operation.  If NEW is set
5424     non-zero, it will be returned.  */
5425
5426  switch (code)
5427    {
5428    case ASHIFT:
5429      /* Convert shifts by constants into multiplications if inside
5430	 an address.  */
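      /* For example, inside a MEM, (ashift X 2) becomes (mult X 4),
	 the canonical form for address arithmetic.  */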
5431      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5432	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5433	  && INTVAL (XEXP (x, 1)) >= 0)
5434	{
5435	  new = make_compound_operation (XEXP (x, 0), next_code);
5436	  new = gen_rtx_combine (MULT, mode, new,
5437				 GEN_INT ((HOST_WIDE_INT) 1
5438					  << INTVAL (XEXP (x, 1))));
5439	}
5440      break;
5441
5442    case AND:
5443      /* If the second operand is not a constant, we can't do anything
5444	 with it.  */
5445      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5446	break;
5447
5448      /* If the constant is a power of two minus one and the first operand
5449	 is a logical right shift, make an extraction.  */
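      /* For example, (and (lshiftrt X 7) 15) is the four bits of X
	 starting at bit 7, i.e. (zero_extract X 4 7).  */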
5450      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5451	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5452	{
5453	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5454	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5455				 0, in_code == COMPARE);
5456	}
5457
5458      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
5459      else if (GET_CODE (XEXP (x, 0)) == SUBREG
5460	       && subreg_lowpart_p (XEXP (x, 0))
5461	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5462	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5463	{
5464	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5465					 next_code);
5466	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5467				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5468				 0, in_code == COMPARE);
5469	}
5470      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
5471      else if ((GET_CODE (XEXP (x, 0)) == XOR
5472		|| GET_CODE (XEXP (x, 0)) == IOR)
5473	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5474	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5475	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5476	{
5477	  /* Apply the distributive law, and then try to make extractions.  */
5478	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5479				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5480					  XEXP (x, 1)),
5481				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5482					  XEXP (x, 1)));
5483	  new = make_compound_operation (new, in_code);
5484	}
5485
5486      /* If we have (and (rotate X C) M) and C is larger than the number
5487	 of bits in M, this is an extraction.  */
5488
5489      else if (GET_CODE (XEXP (x, 0)) == ROTATE
5490	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5491	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5492	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5493	{
5494	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5495	  new = make_extraction (mode, new,
5496				 (GET_MODE_BITSIZE (mode)
5497				  - INTVAL (XEXP (XEXP (x, 0), 1))),
5498				 NULL_RTX, i, 1, 0, in_code == COMPARE);
5499	}
5500
5501      /* On machines without logical shifts, if the operand of the AND is
5502	 a logical shift and our mask turns off all the propagated sign
5503	 bits, we can replace the logical shift with an arithmetic shift.  */
5504      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5505	       && (lshr_optab->handlers[(int) mode].insn_code
5506		   == CODE_FOR_nothing)
5507	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5508	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5509	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5510	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5511	       && mode_width <= HOST_BITS_PER_WIDE_INT)
5512	{
5513	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5514
5515	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5516	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5517	    SUBST (XEXP (x, 0),
5518		   gen_rtx_combine (ASHIFTRT, mode,
5519				    make_compound_operation (XEXP (XEXP (x, 0), 0),
5520							     next_code),
5521				    XEXP (XEXP (x, 0), 1)));
5522	}
5523
5524      /* If the constant is one less than a power of two, this might be
5525	 representable by an extraction even if no shift is present.
5526	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5527	 we are in a COMPARE.  */
5528      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5529	new = make_extraction (mode,
5530			       make_compound_operation (XEXP (x, 0),
5531							next_code),
5532			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5533
5534      /* If we are in a comparison and this is an AND with a power of two,
5535	 convert this into the appropriate bit extract.  */
5536      else if (in_code == COMPARE
5537	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5538	new = make_extraction (mode,
5539			       make_compound_operation (XEXP (x, 0),
5540							next_code),
5541			       i, NULL_RTX, 1, 1, 0, 1);
5542
5543      break;
5544
5545    case LSHIFTRT:
5546      /* If the sign bit is known to be zero, replace this with an
5547	 arithmetic shift.  */
5548      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5549	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5550	  && mode_width <= HOST_BITS_PER_WIDE_INT
5551	  && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
5552	{
5553	  new = gen_rtx_combine (ASHIFTRT, mode,
5554				 make_compound_operation (XEXP (x, 0),
5555							  next_code),
5556				 XEXP (x, 1));
5557	  break;
5558	}
5559
5560      /* ... fall through ... */
5561
5562    case ASHIFTRT:
5563      lhs = XEXP (x, 0);
5564      rhs = XEXP (x, 1);
5565
5566      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5567	 this is a SIGN_EXTRACT.  */
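      /* For example, in SImode, (ashiftrt (ashift X 24) 26) is
	 (sign_extract X 6 2): six bits taken starting at bit 2, with
	 sign extension.  */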
5568      if (GET_CODE (rhs) == CONST_INT
5569	  && GET_CODE (lhs) == ASHIFT
5570	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5571	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
5572	{
5573	  new = make_compound_operation (XEXP (lhs, 0), next_code);
5574	  new = make_extraction (mode, new,
5575				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5576				 NULL_RTX, mode_width - INTVAL (rhs),
5577				 code == LSHIFTRT, 0, in_code == COMPARE);
5578	}
5579
5580      /* See if we have operations between an ASHIFTRT and an ASHIFT.
5581	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
5582	 also do this for some cases of SIGN_EXTRACT, but it doesn't
5583	 seem worth the effort; the case checked for occurs on Alpha.  */
5584
5585      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5586	  && ! (GET_CODE (lhs) == SUBREG
5587		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5588	  && GET_CODE (rhs) == CONST_INT
5589	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5590	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5591	new = make_extraction (mode, make_compound_operation (new, next_code),
5592			       0, NULL_RTX, mode_width - INTVAL (rhs),
5593			       code == LSHIFTRT, 0, in_code == COMPARE);
5594
5595      break;
5596
5597    case SUBREG:
5598      /* Call ourselves recursively on the inner expression.  If we are
5599	 narrowing the object and it has a different RTL code from
5600	 what it originally did, do this SUBREG as a force_to_mode.  */
5601
5602      tem = make_compound_operation (SUBREG_REG (x), in_code);
5603      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5604	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5605	  && subreg_lowpart_p (x))
5606	{
5607	  rtx newer = force_to_mode (tem, mode,
5608				     GET_MODE_MASK (mode), NULL_RTX, 0);
5609
5610	  /* If we have something other than a SUBREG, we might have
5611	     done an expansion, so rerun ourselves.  */
5612	  if (GET_CODE (newer) != SUBREG)
5613	    newer = make_compound_operation (newer, in_code);
5614
5615	  return newer;
5616	}
5617    }
5618
5619  if (new)
5620    {
5621      x = gen_lowpart_for_combine (mode, new);
5622      code = GET_CODE (x);
5623    }
5624
5625  /* Now recursively process each operand of this operation.  */
5626  fmt = GET_RTX_FORMAT (code);
5627  for (i = 0; i < GET_RTX_LENGTH (code); i++)
5628    if (fmt[i] == 'e')
5629      {
5630	new = make_compound_operation (XEXP (x, i), next_code);
5631	SUBST (XEXP (x, i), new);
5632      }
5633
5634  return x;
5635}
5636
5637/* Given M, see if it is a value that would select a field of bits
5638   within an item, but not the entire word.  Return -1 if not.
5639   Otherwise, return the starting position of the field, where 0 is the
5640   low-order bit.
5641
5642   *PLEN is set to the length of the field.  */
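
/* For example, M == 0x78 selects a four-bit field starting at bit 3, so
   we return 3 and set *PLEN to 4; M == 0x50 does not select a contiguous
   field and yields -1.  */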
5643
5644static int
5645get_pos_from_mask (m, plen)
5646     unsigned HOST_WIDE_INT m;
5647     int *plen;
5648{
5649  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
5650  int pos = exact_log2 (m & - m);
5651
5652  if (pos < 0)
5653    return -1;
5654
5655  /* Now shift off the low-order zero bits and see if we have a power of
5656     two minus 1.  */
5657  *plen = exact_log2 ((m >> pos) + 1);
5658
5659  if (*plen <= 0)
5660    return -1;
5661
5662  return pos;
5663}
5664
5665/* See if X can be simplified knowing that we will only refer to it in
5666   MODE and will only refer to those bits that are nonzero in MASK.
5667   If other bits are being computed or if masking operations are done
5668   that select a superset of the bits in MASK, they can sometimes be
5669   ignored.
5670
5671   Return a possibly simplified expression, but always convert X to
5672   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
5673
5674   Also, if REG is non-zero and X is a register equal in value to REG,
5675   replace X with REG.
5676
5677   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5678   are all off in X.  This is used when X will be complemented, by either
5679   NOT, NEG, or XOR.  */
5680
5681static rtx
5682force_to_mode (x, mode, mask, reg, just_select)
5683     rtx x;
5684     enum machine_mode mode;
5685     unsigned HOST_WIDE_INT mask;
5686     rtx reg;
5687     int just_select;
5688{
5689  enum rtx_code code = GET_CODE (x);
5690  int next_select = just_select || code == XOR || code == NOT || code == NEG;
5691  enum machine_mode op_mode;
5692  unsigned HOST_WIDE_INT fuller_mask, nonzero;
5693  rtx op0, op1, temp;
5694
5695  /* If this is a CALL, don't do anything.  Some of the code below
5696     will do the wrong thing since the mode of a CALL is VOIDmode.  */
5697  if (code == CALL)
5698    return x;
5699
5700  /* We want to perform the operation in its present mode unless we know
5701     that the operation is valid in MODE, in which case we do the operation
5702     in MODE.  */
5703  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5704	      && code_to_optab[(int) code] != 0
5705	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5706		  != CODE_FOR_nothing))
5707	     ? mode : GET_MODE (x));
5708
5709  /* It is not valid to do a right-shift in a narrower mode
5710     than the one it came in with.  */
5711  if ((code == LSHIFTRT || code == ASHIFTRT)
5712      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5713    op_mode = GET_MODE (x);
5714
5715  /* Truncate MASK to fit OP_MODE.  */
5716  if (op_mode)
5717    mask &= GET_MODE_MASK (op_mode);
5718
5719  /* When we have an arithmetic operation, or a shift whose count we
5720     do not know, we need to assume that all bits up to the highest-order
5721     bit in MASK will be needed.  This is how we form such a mask.  */
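  /* For example, if MASK is 0x14, the highest-order bit we need is bit 4,
     so FULLER_MASK is 0x1f; a carry in an addition can propagate into
     bit 4 from any of the lower bits.  */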
5722  if (op_mode)
5723    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5724		   ? GET_MODE_MASK (op_mode)
5725		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5726  else
5727    fuller_mask = ~ (HOST_WIDE_INT) 0;
5728
5729  /* Determine what bits of X are guaranteed to be (non)zero.  */
5730  nonzero = nonzero_bits (x, mode);
5731
5732  /* If none of the bits in X are needed, return a zero.  */
5733  if (! just_select && (nonzero & mask) == 0)
5734    return const0_rtx;
5735
5736  /* If X is a CONST_INT, return a new one.  Do this here since the
5737     test below will fail.  */
5738  if (GET_CODE (x) == CONST_INT)
5739    {
5740      HOST_WIDE_INT cval = INTVAL (x) & mask;
5741      int width = GET_MODE_BITSIZE (mode);
5742
5743      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5744	 number, sign extend it.  */
5745      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5746	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5747	cval |= (HOST_WIDE_INT) -1 << width;
5748
5749      return GEN_INT (cval);
5750    }
5751
5752  /* If X is narrower than MODE and we want all the bits in X's mode, just
5753     get X in the proper mode.  */
5754  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5755      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
5756    return gen_lowpart_for_combine (mode, x);
5757
5758  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
5759     MASK are already known to be zero in X, we need not do anything.  */
5760  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
5761    return x;
5762
5763  switch (code)
5764    {
5765    case CLOBBER:
5766      /* If X is a (clobber (const_int)), return it since we know we are
5767	 generating something that won't match. */
5768      return x;
5769
5770    case USE:
5771      /* X is a (use (mem ..)) that was made from a bit-field extraction that
5772	 spanned the boundary of the MEM.  If we are now masking so it is
5773	 within that boundary, we don't need the USE any more.  */
5774      if (! BITS_BIG_ENDIAN
5775	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5776	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5777      break;
5778
5779    case SIGN_EXTEND:
5780    case ZERO_EXTEND:
5781    case ZERO_EXTRACT:
5782    case SIGN_EXTRACT:
5783      x = expand_compound_operation (x);
5784      if (GET_CODE (x) != code)
5785	return force_to_mode (x, mode, mask, reg, next_select);
5786      break;
5787
5788    case REG:
5789      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5790		       || rtx_equal_p (reg, get_last_value (x))))
5791	x = reg;
5792      break;
5793
5794    case SUBREG:
5795      if (subreg_lowpart_p (x)
5796	  /* We can ignore the effect of this SUBREG if it narrows the mode or
5797	     if the constant masks to zero all the bits the mode doesn't
5798	     have.  */
5799	  && ((GET_MODE_SIZE (GET_MODE (x))
5800	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5801	      || (0 == (mask
5802			& GET_MODE_MASK (GET_MODE (x))
5803			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
5804	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
5805      break;
5806
5807    case AND:
5808      /* If this is an AND with a constant, convert it into an AND
5809	 whose constant is the AND of that constant with MASK.  If it
5810	 remains an AND of MASK, delete it since it is redundant.  */
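      /* For example, under MASK == 0x07, (and FOO (const_int 0x0f))
	 becomes (and FOO (const_int 0x07)); if the new constant equals
	 MASK itself, the AND is dropped and FOO is used alone.  */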
5811
5812      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5813	{
5814	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5815				      mask & INTVAL (XEXP (x, 1)));
5816
5817	  /* If X is still an AND, see if it is an AND with a mask that
5818	     is just some low-order bits.  If so, and it is MASK, we don't
5819	     need it.  */
5820
5821	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5822	      && INTVAL (XEXP (x, 1)) == mask)
5823	    x = XEXP (x, 0);
5824
5825	  /* If it remains an AND, try making another AND with the bits
5826	     in the mode mask that aren't in MASK turned on.  If the
5827	     constant in the AND is wide enough, this might make a
5828	     cheaper constant.  */
5829
5830	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5831	      && GET_MODE_MASK (GET_MODE (x)) != mask
5832	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
5833	    {
5834	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
5835				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
5836	      int width = GET_MODE_BITSIZE (GET_MODE (x));
5837	      rtx y;
5838
5839	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5840		 number, sign extend it.  */
5841	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5842		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5843		cval |= (HOST_WIDE_INT) -1 << width;
5844
5845	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
5846	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
5847		x = y;
5848	    }
5849
5850	  break;
5851	}
5852
5853      goto binop;
5854
5855    case PLUS:
5856      /* In (and (plus FOO C1) M), if M is a mask that just turns off
5857	 low-order bits (as in an alignment operation) and FOO is already
5858	 aligned to that boundary, mask C1 to that boundary as well.
5859	 This may eliminate that PLUS and, later, the AND.  */
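      /* For example, consider (and (plus FOO (const_int 9)) (const_int -8))
	 where FOO is known to be a multiple of 8: MASK == -8 keeps no bits
	 below bit 3, so C1 can be truncated from 9 to 8, and the resulting
	 (plus FOO (const_int 8)) may later be absorbed entirely.  */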
5860
5861      {
5862	int width = GET_MODE_BITSIZE (mode);
5863	unsigned HOST_WIDE_INT smask = mask;
5864
5865	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
5866	   number, sign extend it.  */
5867
5868	if (width < HOST_BITS_PER_WIDE_INT
5869	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5870	  smask |= (HOST_WIDE_INT) -1 << width;
5871
5872	if (GET_CODE (XEXP (x, 1)) == CONST_INT
5873	    && exact_log2 (- smask) >= 0
5874	    && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5875	    && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5876	  return force_to_mode (plus_constant (XEXP (x, 0),
5877					       INTVAL (XEXP (x, 1)) & mask),
5878				mode, mask, reg, next_select);
5879      }
5880
5881      /* ... fall through ... */
5882
5883    case MINUS:
5884    case MULT:
5885      /* For PLUS, MINUS and MULT, we need any bits less significant than the
5886	 most significant bit in MASK since carries from those bits will
5887	 affect the bits we are interested in.  */
5888      mask = fuller_mask;
5889      goto binop;
5890
5891    case IOR:
5892    case XOR:
5893      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5894	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5895	 operation which may be a bitfield extraction.  Ensure that the
5896	 constant we form is not wider than the mode of X.  */
5897
5898      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5899	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5900	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5901	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5902	  && GET_CODE (XEXP (x, 1)) == CONST_INT
5903	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
5904	       + floor_log2 (INTVAL (XEXP (x, 1))))
5905	      < GET_MODE_BITSIZE (GET_MODE (x)))
5906	  && ((INTVAL (XEXP (x, 1))
5907	       & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0))
5908	{
5909	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5910			      << INTVAL (XEXP (XEXP (x, 0), 1)));
5911	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
5912			     XEXP (XEXP (x, 0), 0), temp);
5913	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
5914	  return force_to_mode (x, mode, mask, reg, next_select);
5915	}
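      /* For example, with MASK == 0xff,
	 (ior (lshiftrt FOO (const_int 8)) (const_int 255)) becomes
	 (lshiftrt (ior FOO (const_int 65280)) (const_int 8)) when the
	 nonzero-bits test above allows it: ORing in 255 << 8 before the
	 shift yields the same low-order eight bits.  */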
5916
5917    binop:
5918      /* For most binary operations, just propagate into the operation and
5919	 change the mode if we have an operation of that mode.  */
5920
5921      op0 = gen_lowpart_for_combine (op_mode,
5922				     force_to_mode (XEXP (x, 0), mode, mask,
5923						    reg, next_select));
5924      op1 = gen_lowpart_for_combine (op_mode,
5925				     force_to_mode (XEXP (x, 1), mode, mask,
5926						    reg, next_select));
5927
5928      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5929	 MASK since OP1 might have been sign-extended but we never want
5930	 to turn on extra bits, since combine might have previously relied
5931	 on them being off.  */
5932      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5933	  && (INTVAL (op1) & mask) != 0)
5934	op1 = GEN_INT (INTVAL (op1) & mask);
5935
5936      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5937	x = gen_binary (code, op_mode, op0, op1);
5938      break;
5939
5940    case ASHIFT:
5941      /* For left shifts, do the same, but just for the first operand.
5942	 However, we cannot do anything with shifts where we cannot
5943	 guarantee that the counts are smaller than the size of the mode
5944	 because such a count will have a different meaning in a
5945	 wider mode.  */
5946
5947      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5948	     && INTVAL (XEXP (x, 1)) >= 0
5949	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5950	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5951		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5952		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5953	break;
5954
5955      /* If the shift count is a constant and we can do arithmetic in
5956	 the mode of the shift, refine which bits we need.  Otherwise, use the
5957	 conservative form of the mask.  */
5958      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5959	  && INTVAL (XEXP (x, 1)) >= 0
5960	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5961	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5962	mask >>= INTVAL (XEXP (x, 1));
5963      else
5964	mask = fuller_mask;
5965
5966      op0 = gen_lowpart_for_combine (op_mode,
5967				     force_to_mode (XEXP (x, 0), op_mode,
5968						    mask, reg, next_select));
5969
5970      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5971	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
5972      break;
5973
5974    case LSHIFTRT:
5975      /* Here we can only do something if the shift count is a constant,
5976	 this shift constant is valid for the host, and we can do arithmetic
5977	 in OP_MODE.  */
5978
5979      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5980	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5981	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5982	{
5983	  rtx inner = XEXP (x, 0);
5984
5985	  /* Select the mask of the bits we need for the shift operand.  */
5986	  mask <<= INTVAL (XEXP (x, 1));
5987
5988	  /* We can only change the mode of the shift if we can do arithmetic
5989	     in the mode of the shift and MASK is no wider than the width of
5990	     OP_MODE.  */
5991	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5992	      || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5993	    op_mode = GET_MODE (x);
5994
5995	  inner = force_to_mode (inner, op_mode, mask, reg, next_select);
5996
5997	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5998	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
5999	}
6000
6001      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6002	 shift and AND produces only copies of the sign bit (C2 is one less
6003	 than a power of two), we can do this with just a shift.  */
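      /* For example, if FOO has 24 sign bit copies in SImode, then in
	 (and (lshiftrt FOO (const_int 8)) (const_int 255)) the AND keeps
	 only sign bit copies, so (lshiftrt FOO (const_int 24)) computes
	 the same value.  */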
6004
6005      if (GET_CODE (x) == LSHIFTRT
6006	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6007	  && ((INTVAL (XEXP (x, 1))
6008	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6009	      >= GET_MODE_BITSIZE (GET_MODE (x)))
6010	  && exact_log2 (mask + 1) >= 0
6011	  && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6012	      >= exact_log2 (mask + 1)))
6013	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6014			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6015				 - exact_log2 (mask + 1)));
6016      break;
6017
6018    case ASHIFTRT:
6019      /* If we are just looking for the sign bit, we don't need this shift at
6020	 all, even if it has a variable count.  */
6021      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6022	  && (mask == ((HOST_WIDE_INT) 1
6023		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6024	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6025
6026      /* If this is a shift by a constant, get a mask that contains those bits
6027	 that are not copies of the sign bit.  We then have two cases:  If
6028	 MASK only includes those bits, this can be a logical shift, which may
6029	 allow simplifications.  If MASK is a single-bit field not within
6030	 those bits, we are requesting a copy of the sign bit and hence can
6031	 shift the sign bit to the appropriate location.  */
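      /* For example, an SImode (ashiftrt FOO (const_int 24)) has only its
	 low eight result bits not equal to the sign bit.  If MASK fits in
	 0xff, the shift can become logical; if MASK is the single bit
	 0x100, (lshiftrt FOO (const_int 23)) moves the sign bit straight
	 to bit 8.  */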
6032
6033      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6034	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6035	{
6036	  int i = -1;
6037
6038	  /* If the considered data is wider than HOST_WIDE_INT, we can't
6039	     represent a mask for all its bits in a single scalar.
6040	     But we only care about the lower bits, so calculate these.  */
6041
6042	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6043	    {
6044	      nonzero = ~(HOST_WIDE_INT)0;
6045
6046	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6047		 is the number of bits a full-width mask would have set.
6048		 We need only shift if these are fewer than nonzero can
6049		 hold.  If not, we must keep all bits set in nonzero.  */
6050
6051	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6052		  < HOST_BITS_PER_WIDE_INT)
6053		nonzero >>= INTVAL (XEXP (x, 1))
6054			    + HOST_BITS_PER_WIDE_INT
6055			    - GET_MODE_BITSIZE (GET_MODE (x));
6056	    }
6057	  else
6058	    {
6059	      nonzero = GET_MODE_MASK (GET_MODE (x));
6060	      nonzero >>= INTVAL (XEXP (x, 1));
6061	    }
6062
6063	  if ((mask & ~ nonzero) == 0
6064	      || (i = exact_log2 (mask)) >= 0)
6065	    {
6066	      x = simplify_shift_const
6067		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6068		 i < 0 ? INTVAL (XEXP (x, 1))
6069		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6070
6071	      if (GET_CODE (x) != ASHIFTRT)
6072		return force_to_mode (x, mode, mask, reg, next_select);
6073	    }
6074	}
6075
6076      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
6077	 even if the shift count isn't a constant.  */
6078      if (mask == 1)
6079	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6080
6081      /* If this is a sign-extension operation that just affects bits
6082	 we don't care about, remove it.  Be sure the call above returned
6083	 something that is still a shift.  */
6084
6085      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6086	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6087	  && INTVAL (XEXP (x, 1)) >= 0
6088	  && (INTVAL (XEXP (x, 1))
6089	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6090	  && GET_CODE (XEXP (x, 0)) == ASHIFT
6091	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6092	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6093	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6094			      reg, next_select);
6095
6096      break;
6097
6098    case ROTATE:
6099    case ROTATERT:
6100      /* If the shift count is constant and we can do computations
6101	 in the mode of X, compute where the bits we care about are.
6102	 Otherwise, we can't do anything.  Don't change the mode of
6103	 the shift or propagate MODE into the shift, though.  */
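      /* For example, for SImode (rotate FOO (const_int 8)) with
	 MASK == 0xff00, rotating the mask back the other way gives 0xff:
	 bits 0-7 of FOO are the only operand bits that can land under
	 MASK.  */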
6104      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6105	  && INTVAL (XEXP (x, 1)) >= 0)
6106	{
6107	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6108					    GET_MODE (x), GEN_INT (mask),
6109					    XEXP (x, 1));
6110	  if (temp && GET_CODE (temp) == CONST_INT)
6111	    SUBST (XEXP (x, 0),
6112		   force_to_mode (XEXP (x, 0), GET_MODE (x),
6113				  INTVAL (temp), reg, next_select));
6114	}
6115      break;
6116
6117    case NEG:
6118      /* If we just want the low-order bit, the NEG isn't needed since it
6119	 won't change the low-order bit.  */
6120      if (mask == 1)
6121	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6122
6123      /* We need any bits less significant than the most significant bit in
6124	 MASK since carries from those bits will affect the bits we are
6125	 interested in.  */
6126      mask = fuller_mask;
6127      goto unop;
6128
6129    case NOT:
6130      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6131	 same as the XOR case above.  Ensure that the constant we form is not
6132	 wider than the mode of X.  */
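      /* For example, with MASK == 0xff, (not (lshiftrt FOO (const_int 8)))
	 can become (lshiftrt (xor FOO (const_int 65280)) (const_int 8)):
	 inverting bits 8-15 before the shift is the same as inverting
	 bits 0-7 after it.  */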
6133
6134      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6135	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6136	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6137	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6138	      < GET_MODE_BITSIZE (GET_MODE (x)))
6139	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6140	{
6141	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6142	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6143	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6144
6145	  return force_to_mode (x, mode, mask, reg, next_select);
6146	}
6147
6148    unop:
6149      op0 = gen_lowpart_for_combine (op_mode,
6150				     force_to_mode (XEXP (x, 0), mode, mask,
6151						    reg, next_select));
6152      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6153	x = gen_unary (code, op_mode, op_mode, op0);
6154      break;
6155
6156    case NE:
6157      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6158	 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6159	 in CONST.  */
6160      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6161	  && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
6162	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6163
6164      break;
6165
6166    case IF_THEN_ELSE:
6167      /* We have no way of knowing if the IF_THEN_ELSE can itself be
6168	 written in a narrower mode.  We play it safe and do not do so.  */
6169
6170      SUBST (XEXP (x, 1),
6171	     gen_lowpart_for_combine (GET_MODE (x),
6172				      force_to_mode (XEXP (x, 1), mode,
6173						     mask, reg, next_select)));
6174      SUBST (XEXP (x, 2),
6175	     gen_lowpart_for_combine (GET_MODE (x),
6176				      force_to_mode (XEXP (x, 2), mode,
6177						     mask, reg, next_select)));
6178      break;
6179    }
6180
6181  /* Ensure we return a value of the proper mode.  */
6182  return gen_lowpart_for_combine (mode, x);
6183}
6184
6185/* Return nonzero if X is an expression that has one of two values depending on
6186   whether some other value is zero or nonzero.  In that case, we return the
6187   value that is being tested, *PTRUE is set to the value if the rtx being
6188   returned has a nonzero value, and *PFALSE is set to the other alternative.
6189
6190   If we return zero, we set *PTRUE and *PFALSE to X.  */
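/* For example, on a machine where STORE_FLAG_VALUE is 1, given
   (plus (mult (eq A B) (const_int 4)) (const_int 1)) we would return
   the (eq A B) rtx, setting *PTRUE to (const_int 5) and *PFALSE to
   (const_int 1).  */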
6191
6192static rtx
6193if_then_else_cond (x, ptrue, pfalse)
6194     rtx x;
6195     rtx *ptrue, *pfalse;
6196{
6197  enum machine_mode mode = GET_MODE (x);
6198  enum rtx_code code = GET_CODE (x);
6199  int size = GET_MODE_BITSIZE (mode);
6200  rtx cond0, cond1, true0, true1, false0, false1;
6201  unsigned HOST_WIDE_INT nz;
6202
6203  /* If this is a unary operation whose operand has one of two values, apply
6204     our opcode to compute those values.  */
6205  if (GET_RTX_CLASS (code) == '1'
6206      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6207    {
6208      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6209      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6210      return cond0;
6211    }
6212
6213  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6214     make can't possibly match and would suppress other optimizations.  */
6215  else if (code == COMPARE)
6216    ;
6217
6218  /* If this is a binary operation, see if either side has only one of two
6219     values.  If either one does or if both do and they are conditional on
6220     the same value, compute the new true and false values.  */
6221  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6222	   || GET_RTX_CLASS (code) == '<')
6223    {
6224      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6225      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6226
6227      if ((cond0 != 0 || cond1 != 0)
6228	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6229	{
6230	  *ptrue = gen_binary (code, mode, true0, true1);
6231	  *pfalse = gen_binary (code, mode, false0, false1);
6232	  return cond0 ? cond0 : cond1;
6233	}
6234
6235#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6236
6237      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6238	 operands is zero when the other is non-zero, and vice-versa.  */
6239
6240      if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6241	   || code == UMAX)
6242	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6243	{
6244	  rtx op0 = XEXP (XEXP (x, 0), 1);
6245	  rtx op1 = XEXP (XEXP (x, 1), 1);
6246
6247	  cond0 = XEXP (XEXP (x, 0), 0);
6248	  cond1 = XEXP (XEXP (x, 1), 0);
6249
6250	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6251	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6252	      && reversible_comparison_p (cond1)
6253	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6254		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6255		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6256		  || ((swap_condition (GET_CODE (cond0))
6257		       == reverse_condition (GET_CODE (cond1)))
6258		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6259		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6260	      && ! side_effects_p (x))
6261	    {
6262	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6263	      *pfalse = gen_binary (MULT, mode,
6264				    (code == MINUS
6265				     ? gen_unary (NEG, mode, mode, op1) : op1),
6266				    const_true_rtx);
6267	      return cond0;
6268	    }
6269	}
6270
6271      /* Similarly for MULT, AND and UMIN, except that for these the result
6272	 is always zero.  */
6273      if ((code == MULT || code == AND || code == UMIN)
6274	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6275	{
6276	  cond0 = XEXP (XEXP (x, 0), 0);
6277	  cond1 = XEXP (XEXP (x, 1), 0);
6278
6279	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6280	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6281	      && reversible_comparison_p (cond1)
6282	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6283		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6284		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6285		  || ((swap_condition (GET_CODE (cond0))
6286		       == reverse_condition (GET_CODE (cond1)))
6287		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6288		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6289	      && ! side_effects_p (x))
6290	    {
6291	      *ptrue = *pfalse = const0_rtx;
6292	      return cond0;
6293	    }
6294	}
6295#endif
6296    }
6297
6298  else if (code == IF_THEN_ELSE)
6299    {
6300      /* If we have IF_THEN_ELSE already, extract the condition and
6301	 canonicalize it if it is NE or EQ.  */
6302      cond0 = XEXP (x, 0);
6303      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6304      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6305	return XEXP (cond0, 0);
6306      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6307	{
6308	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6309	  return XEXP (cond0, 0);
6310	}
6311      else
6312	return cond0;
6313    }
6314
6315  /* If X is a normal SUBREG with both inner and outer modes integral,
6316     we can narrow both the true and false values of the inner expression,
6317     if there is a condition.  */
6318  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6319	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6320	   && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6321	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6322					       &true0, &false0)))
6323    {
6324      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6325      *pfalse
6326	= force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6327
6328      return cond0;
6329    }
6330
6331  /* If X is a constant, this isn't special and will cause confusion
6332     if we treat it as such.  Likewise if it is equivalent to a constant.  */
6333  else if (CONSTANT_P (x)
6334	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6335    ;
6336
6337  /* If X is known to be either 0 or -1, those are the true and
6338     false values when testing X.  */
6339  else if (num_sign_bit_copies (x, mode) == size)
6340    {
6341      *ptrue = constm1_rtx, *pfalse = const0_rtx;
6342      return x;
6343    }
6344
6345  /* Likewise for 0 or a single bit.  */
6346  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6347    {
6348      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6349      return x;
6350    }
6351
6352  /* Otherwise fail; set both *PTRUE and *PFALSE to X and return zero.  */
6353  *ptrue = *pfalse = x;
6354  return 0;
6355}
6356
6357/* Return the value of expression X given the fact that condition COND
6358   is known to be true when applied to REG as its first operand and VAL
6359   as its second.  X is known to not be shared and so can be modified in
6360   place.
6361
6362   We only handle the simplest cases, and specifically those cases that
6363   arise with IF_THEN_ELSE expressions.  */
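/* For example, if COND is GE, REG is A and VAL is (const_int 0), then
   knowing A >= 0 lets us simplify (smax A (const_int 0)) to just A.  */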
6364
6365static rtx
6366known_cond (x, cond, reg, val)
6367     rtx x;
6368     enum rtx_code cond;
6369     rtx reg, val;
6370{
6371  enum rtx_code code = GET_CODE (x);
6372  rtx temp;
6373  char *fmt;
6374  int i, j;
6375
6376  if (side_effects_p (x))
6377    return x;
6378
6379  if (cond == EQ && rtx_equal_p (x, reg))
6380    return val;
6381
6382  /* If X is (abs REG) and we know something about REG's relationship
6383     with zero, we may be able to simplify this.  */
6384
6385  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6386    switch (cond)
6387      {
6388      case GE:  case GT:  case EQ:
6389	return XEXP (x, 0);
6390      case LT:  case LE:
6391	return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6392			  XEXP (x, 0));
6393      }
6394
6395  /* The only other cases we handle are MIN, MAX, and comparisons if the
6396     operands are the same as REG and VAL.  */
6397
6398  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6399    {
6400      if (rtx_equal_p (XEXP (x, 0), val))
6401	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6402
6403      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6404	{
6405	  if (GET_RTX_CLASS (code) == '<')
6406	    return (comparison_dominates_p (cond, code) ? const_true_rtx
6407		    : (comparison_dominates_p (cond,
6408					       reverse_condition (code))
6409		       ? const0_rtx : x));
6410
6411	  else if (code == SMAX || code == SMIN
6412		   || code == UMIN || code == UMAX)
6413	    {
6414	      int unsignedp = (code == UMIN || code == UMAX);
6415
6416	      if (code == SMAX || code == UMAX)
6417		cond = reverse_condition (cond);
6418
6419	      switch (cond)
6420		{
6421		case GE:   case GT:
6422		  return unsignedp ? x : XEXP (x, 1);
6423		case LE:   case LT:
6424		  return unsignedp ? x : XEXP (x, 0);
6425		case GEU:  case GTU:
6426		  return unsignedp ? XEXP (x, 1) : x;
6427		case LEU:  case LTU:
6428		  return unsignedp ? XEXP (x, 0) : x;
6429		}
6430	    }
6431	}
6432    }
6433
6434  fmt = GET_RTX_FORMAT (code);
6435  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6436    {
6437      if (fmt[i] == 'e')
6438	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6439      else if (fmt[i] == 'E')
6440	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6441	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6442						cond, reg, val));
6443    }
6444
6445  return x;
6446}
6447
6448/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6449   Return that assignment if so.
6450
6451   We only handle the most common cases.  */
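/* For example, a hypothetical
   (set A (ior (and A (const_int -241)) (ashift B (const_int 4))))
   stores a four-bit field at bit 4, since -241 is ~(15 << 4); assuming
   B is known to fit in four bits, this can be rewritten as an assignment
   to (zero_extract A (const_int 4) (const_int 4)).  */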
6452
6453static rtx
6454make_field_assignment (x)
6455     rtx x;
6456{
6457  rtx dest = SET_DEST (x);
6458  rtx src = SET_SRC (x);
6459  rtx assign;
6460  HOST_WIDE_INT c1;
6461  int pos, len;
6462  rtx other;
6463  enum machine_mode mode;
6464
6465  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6466     a clear of a one-bit field.  We will have changed it to
6467     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
6468     for a SUBREG.  */
6469
6470  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6471      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6472      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6473      && (rtx_equal_p (dest, XEXP (src, 1))
6474	  || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6475	  || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6476    {
6477      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6478				1, 1, 1, 0);
6479      return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6480    }
6481
6482  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6483	   && subreg_lowpart_p (XEXP (src, 0))
6484	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6485	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6486	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6487	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6488	   && (rtx_equal_p (dest, XEXP (src, 1))
6489	       || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6490	       || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6491    {
6492      assign = make_extraction (VOIDmode, dest, 0,
6493				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6494				1, 1, 1, 0);
6495      return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6496    }
6497
6498  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6499     one-bit field.  */
6500  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6501	   && XEXP (XEXP (src, 0), 0) == const1_rtx
6502	   && (rtx_equal_p (dest, XEXP (src, 1))
6503	       || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6504	       || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6505    {
6506      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6507				1, 1, 1, 0);
6508      return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6509    }
6510
6511  /* The other case we handle is assignments into a constant-position
6512     field.  They look like (ior (and DEST C1) OTHER).  If C1 represents
6513     a mask that has all one bits except for a group of zero bits and
6514     OTHER is known to have zeros where C1 has ones, this is such an
6515     assignment.  Compute the position and length from C1.  Shift OTHER
6516     to the appropriate position, force it to the required mode, and
6517     make the extraction.  Check for the AND in both operands.  */
6518
6519  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6520      && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6521      && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6522	  || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6523	  || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
6524    c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6525  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6526	   && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6527	   && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6528	       || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6529	       || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6530			       dest)))
6531    c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6532  else
6533    return x;
6534
6535  pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6536  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6537      || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6538	  && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6539    return x;
6540
6541  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6542
6543  /* The mode to use for the source is the mode of the assignment, or of
6544     what is inside a possible STRICT_LOW_PART.  */
6545  mode = (GET_CODE (assign) == STRICT_LOW_PART
6546	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6547
6548  /* Shift OTHER right POS places and make it the source, restricting it
6549     to the proper length and mode.  */
6550
6551  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6552					     GET_MODE (src), other, pos),
6553		       mode,
6554		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6555		       ? GET_MODE_MASK (mode)
6556		       : ((HOST_WIDE_INT) 1 << len) - 1,
6557		       dest, 0);
6558
6559  return gen_rtx_combine (SET, VOIDmode, assign, src);
6560}
6561
6562/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6563   if so.  */
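/* For example, (ior (and A C) (and B C)) is rewritten as
   (and (ior A B) C), which may let (ior A B) simplify further.  */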
6564
6565static rtx
6566apply_distributive_law (x)
6567     rtx x;
6568{
6569  enum rtx_code code = GET_CODE (x);
6570  rtx lhs, rhs, other;
6571  rtx tem;
6572  enum rtx_code inner_code;
6573
6574  /* Distributivity is not true for floating point.
6575     It can change the value.  So don't do it.
6576     -- rms and moshier@world.std.com.  */
6577  if (FLOAT_MODE_P (GET_MODE (x)))
6578    return x;
6579
6580  /* The outer operation can only be one of the following:  */
6581  if (code != IOR && code != AND && code != XOR
6582      && code != PLUS && code != MINUS)
6583    return x;
6584
6585  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6586
6587  /* If either operand is a primitive we can't do anything, so get out fast. */
6588  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6589      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6590    return x;
6591
6592  lhs = expand_compound_operation (lhs);
6593  rhs = expand_compound_operation (rhs);
6594  inner_code = GET_CODE (lhs);
6595  if (inner_code != GET_CODE (rhs))
6596    return x;
6597
6598  /* See if the inner and outer operations distribute.  */
6599  switch (inner_code)
6600    {
6601    case LSHIFTRT:
6602    case ASHIFTRT:
6603    case AND:
6604    case IOR:
6605      /* These all distribute except over PLUS.  */
6606      if (code == PLUS || code == MINUS)
6607	return x;
6608      break;
6609
6610    case MULT:
6611      if (code != PLUS && code != MINUS)
6612	return x;
6613      break;
6614
6615    case ASHIFT:
6616      /* This is also a multiply, so it distributes over everything.  */
6617      break;
6618
6619    case SUBREG:
6620      /* Non-paradoxical SUBREGs distribute over all operations, provided
6621	 the inner modes and word numbers are the same, this is an extraction
6622	 of a low-order part, we don't convert an fp operation to int or
6623	 vice versa, and we would not be converting a single-word
6624	 operation into a multi-word operation.  The latter test is not
6625	 required, but it prevents generating unneeded multi-word operations.
6626	 Some of the previous tests are redundant given the latter test, but
6627	 are retained because they are required for correctness.
6628
6629	 We produce the result slightly differently in this case.  */
6630
6631      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6632	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6633	  || ! subreg_lowpart_p (lhs)
6634	  || (GET_MODE_CLASS (GET_MODE (lhs))
6635	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6636	  || (GET_MODE_SIZE (GET_MODE (lhs))
6637	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6638	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6639	return x;
6640
6641      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6642			SUBREG_REG (lhs), SUBREG_REG (rhs));
6643      return gen_lowpart_for_combine (GET_MODE (x), tem);
6644
6645    default:
6646      return x;
6647    }
6648
6649  /* Set LHS and RHS to the inner operands (A and B in the example
6650     above) and set OTHER to the common operand (C in the example).
6651     There is only one way to do this unless the inner operation is
6652     commutative.  */
6653  if (GET_RTX_CLASS (inner_code) == 'c'
6654      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6655    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6656  else if (GET_RTX_CLASS (inner_code) == 'c'
6657	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6658    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6659  else if (GET_RTX_CLASS (inner_code) == 'c'
6660	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6661    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6662  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6663    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6664  else
6665    return x;
6666
6667  /* Form the new inner operation, seeing if it simplifies first.  */
6668  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6669
6670  /* There is one exception to the general way of distributing:
6671     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
6672  if (code == XOR && inner_code == IOR)
6673    {
6674      inner_code = AND;
6675      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
6676    }
6677
6678  /* We may be able to continue distributing the result, so call
6679     ourselves recursively on the inner operation before forming the
6680     outer operation, which we return.  */
6681  return gen_binary (inner_code, GET_MODE (x),
6682		     apply_distributive_law (tem), other);
6683}
6684
6685/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6686   in MODE.
6687
6688   Return an equivalent form, if different from X.  Otherwise, return X.  If
6689   X is zero, we are to always construct the equivalent form.  */
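/* For example, with MODE == SImode, VAROP == (plus A (const_int 4)) and
   CONSTOP == 1: if the low bit of A is known to be zero, so is the low
   bit of the sum, and the whole AND folds to (const_int 0).  */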
6690
6691static rtx
6692simplify_and_const_int (x, mode, varop, constop)
6693     rtx x;
6694     enum machine_mode mode;
6695     rtx varop;
6696     unsigned HOST_WIDE_INT constop;
6697{
6698  unsigned HOST_WIDE_INT nonzero;
6699  int width = GET_MODE_BITSIZE (mode);
6700  int i;
6701
6702  /* Simplify VAROP knowing that we will be only looking at some of the
6703     bits in it.  */
6704  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
6705
6706  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6707     CONST_INT, we are done.  */
6708  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6709    return varop;
6710
6711  /* See what bits may be nonzero in VAROP.  Unlike the general case of
6712     a call to nonzero_bits, here we don't care about bits outside
6713     MODE.  */
6714
6715  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6716
6717  /* If this would be an entire word for the target, but is not for
6718     the host, then sign-extend on the host so that the number will look
6719     the same way on the host that it would on the target.
6720
6721     For example, when building a 32 bit sparc targeted compiler hosted
6722     on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
6723     represented as the 64 bit value -1, and not as 0x00000000ffffffff.
6724     The latter confuses the sparc backend.  */
6725
6726  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
6727      && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
6728    nonzero |= ((HOST_WIDE_INT) (-1) << width);
6729
6730  /* Turn off all bits in the constant that are known to already be zero.
6731     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6732     which is tested below.  */
6733
6734  constop &= nonzero;
6735
6736  /* If we don't have any bits left, return zero.  */
6737  if (constop == 0)
6738    return const0_rtx;
6739
6740  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6741	 a power of two, we can replace this with an ASHIFT.  */
6742  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6743      && (i = exact_log2 (constop)) >= 0)
6744    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6745
6746  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6747     or XOR, then try to apply the distributive law.  This may eliminate
6748     operations if either branch can be simplified because of the AND.
6749     It may also make some cases more complex, but those cases probably
6750     won't match a pattern either with or without this.  */
6751
6752  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6753    return
6754      gen_lowpart_for_combine
6755	(mode,
6756	 apply_distributive_law
6757	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6758		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6759					      XEXP (varop, 0), constop),
6760		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6761					      XEXP (varop, 1), constop))));
6762
6763  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
6764     if we already had one (just check for the simplest cases).  */
6765  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6766      && GET_MODE (XEXP (x, 0)) == mode
6767      && SUBREG_REG (XEXP (x, 0)) == varop)
6768    varop = XEXP (x, 0);
6769  else
6770    varop = gen_lowpart_for_combine (mode, varop);
6771
6772  /* If we can't make the SUBREG, try to return what we were given. */
6773  if (GET_CODE (varop) == CLOBBER)
6774    return x ? x : varop;
6775
6776  /* If we are only masking insignificant bits, return VAROP.  */
6777  if (constop == nonzero)
6778    x = varop;
6779
6780  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
6781  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6782    x = gen_binary (AND, mode, varop, GEN_INT (constop));
6783
6784  else
6785    {
6786      if (GET_CODE (XEXP (x, 1)) != CONST_INT
6787	  || INTVAL (XEXP (x, 1)) != constop)
6788	SUBST (XEXP (x, 1), GEN_INT (constop));
6789
6790      SUBST (XEXP (x, 0), varop);
6791    }
6792
6793  return x;
6794}
6795
6796/* Given an expression, X, compute which bits in X can be non-zero.
6797   We don't care about bits outside of those defined in MODE.
6798
6799   For most X this is simply GET_MODE_MASK (MODE), but if X is
6800   a shift, AND, or zero_extract, we can do better.  */
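/* For example, nonzero_bits of (and FOO (const_int 15)) is at most 15,
   and nonzero_bits of (ashift FOO (const_int 2)) always has the low two
   bits clear, since the shift zeros them.  */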
6801
6802static unsigned HOST_WIDE_INT
6803nonzero_bits (x, mode)
6804     rtx x;
6805     enum machine_mode mode;
6806{
6807  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6808  unsigned HOST_WIDE_INT inner_nz;
6809  enum rtx_code code;
6810  int mode_width = GET_MODE_BITSIZE (mode);
6811  rtx tem;
6812
6813  /* For floating-point values, assume all bits are needed.  */
6814  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6815    return nonzero;
6816
6817  /* If X is wider than MODE, use its mode instead.  */
6818  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6819    {
6820      mode = GET_MODE (x);
6821      nonzero = GET_MODE_MASK (mode);
6822      mode_width = GET_MODE_BITSIZE (mode);
6823    }
6824
6825  if (mode_width > HOST_BITS_PER_WIDE_INT)
6826    /* Our only callers in this case look for single bit values.  So
6827       just return the mode mask.  Those tests will then be false.  */
6828    return nonzero;
6829
6830#ifndef WORD_REGISTER_OPERATIONS
6831  /* If MODE is wider than X, but both are a single word for both the host
6832     and target machines, we can compute this from which bits of the
6833     object might be nonzero in its own mode, taking into account the fact
6834     that on many CISC machines, accessing an object in a wider mode
6835     causes the high-order bits to become undefined.  So they are
6836     not known to be zero.  */
6837
6838  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6839      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6840      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6841      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6842    {
6843      nonzero &= nonzero_bits (x, GET_MODE (x));
6844      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6845      return nonzero;
6846    }
6847#endif
6848
6849  code = GET_CODE (x);
6850  switch (code)
6851    {
6852    case REG:
6853#ifdef POINTERS_EXTEND_UNSIGNED
6854      /* If pointers extend unsigned and this is a pointer in Pmode, say that
6855	 all the bits above ptr_mode are known to be zero.  */
6856      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
6857	  && REGNO_POINTER_FLAG (REGNO (x)))
6858	nonzero &= GET_MODE_MASK (ptr_mode);
6859#endif
6860
6861#ifdef STACK_BOUNDARY
6862      /* If this is the stack pointer, we may know something about its
6863	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
6864	 stack to be momentarily aligned only to that amount, so we pick
6865	 the least alignment.  */
6866
6867      if (x == stack_pointer_rtx)
6868	{
6869	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6870
6871#ifdef PUSH_ROUNDING
6872	  sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6873#endif
6874
6875	  /* We must return here, otherwise we may get a worse result from
6876	     one of the choices below.  There is nothing useful below as
6877	     far as the stack pointer is concerned.  */
6878	  return nonzero & ~ (sp_alignment - 1);
6879	}
6880#endif
6881
6882      /* If X is a register whose nonzero bits value is current, use it.
6883	 Otherwise, if X is a register whose value we can find, use that
6884	 value.  Otherwise, use the previously-computed global nonzero bits
6885	 for this register.  */
6886
6887      if (reg_last_set_value[REGNO (x)] != 0
6888	  && reg_last_set_mode[REGNO (x)] == mode
6889	  && (reg_n_sets[REGNO (x)] == 1
6890	      || reg_last_set_label[REGNO (x)] == label_tick)
6891	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6892	return reg_last_set_nonzero_bits[REGNO (x)];
6893
6894      tem = get_last_value (x);
6895
6896      if (tem)
6897	{
6898#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6899	  /* If X is narrower than MODE and TEM is a non-negative
6900	     constant that would appear negative in the mode of X,
6901	     sign-extend it for use in reg_nonzero_bits because some
6902	     machines (maybe most) will actually do the sign-extension
6903	     and this is the conservative approach.
6904
6905	     ??? For 2.5, try to tighten up the MD files in this regard
6906	     instead of this kludge.  */
6907
6908	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6909	      && GET_CODE (tem) == CONST_INT
6910	      && INTVAL (tem) > 0
6911	      && 0 != (INTVAL (tem)
6912		       & ((HOST_WIDE_INT) 1
6913			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6914	    tem = GEN_INT (INTVAL (tem)
6915			   | ((HOST_WIDE_INT) (-1)
6916			      << GET_MODE_BITSIZE (GET_MODE (x))));
6917#endif
6918	  return nonzero_bits (tem, mode);
6919	}
6920      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6921	return reg_nonzero_bits[REGNO (x)] & nonzero;
6922      else
6923	return nonzero;
6924
6925    case CONST_INT:
6926#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6927      /* If X is negative in MODE, sign-extend the value.  */
6928      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6929	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6930	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
6931#endif
6932
6933      return INTVAL (x);
6934
6935    case MEM:
6936#ifdef LOAD_EXTEND_OP
6937      /* In many, if not most, RISC machines, reading a byte from memory
6938	 zeros the rest of the register.  Noticing that fact saves a lot
6939	 of extra zero-extends.  */
6940      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6941	nonzero &= GET_MODE_MASK (GET_MODE (x));
6942#endif
6943      break;
6944
6945    case EQ:  case NE:
6946    case GT:  case GTU:
6947    case LT:  case LTU:
6948    case GE:  case GEU:
6949    case LE:  case LEU:
6950
6951      /* If this produces an integer result, we know which bits are set.
6952	 Code here used to clear bits outside the mode of X, but that is
6953	 now done above.  */
6954
6955      if (GET_MODE_CLASS (mode) == MODE_INT
6956	  && mode_width <= HOST_BITS_PER_WIDE_INT)
6957	nonzero = STORE_FLAG_VALUE;
6958      break;
6959
6960    case NEG:
6961      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6962	  == GET_MODE_BITSIZE (GET_MODE (x)))
6963	nonzero = 1;
6964
6965      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6966	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6967      break;
6968
6969    case ABS:
6970      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6971	  == GET_MODE_BITSIZE (GET_MODE (x)))
6972	nonzero = 1;
6973      break;
6974
6975    case TRUNCATE:
6976      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6977      break;
6978
6979    case ZERO_EXTEND:
6980      nonzero &= nonzero_bits (XEXP (x, 0), mode);
6981      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6982	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6983      break;
6984
6985    case SIGN_EXTEND:
6986      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6987	 Otherwise, show that all the bits in the outer mode but not in the
6988	 inner mode may be non-zero.  */
6989      inner_nz = nonzero_bits (XEXP (x, 0), mode);
6990      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6991	{
6992	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6993	  if (inner_nz &
6994	      (((HOST_WIDE_INT) 1
6995		<< (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6996	    inner_nz |= (GET_MODE_MASK (mode)
6997			  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6998	}
6999
7000      nonzero &= inner_nz;
7001      break;
7002
7003    case AND:
7004      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7005		  & nonzero_bits (XEXP (x, 1), mode));
7006      break;
7007
7008    case XOR:   case IOR:
7009    case UMIN:  case UMAX:  case SMIN:  case SMAX:
7010      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7011		  | nonzero_bits (XEXP (x, 1), mode));
7012      break;
7013
7014    case PLUS:  case MINUS:
7015    case MULT:
7016    case DIV:   case UDIV:
7017    case MOD:   case UMOD:
7018      /* We can apply the rules of arithmetic to compute the number of
7019	 high- and low-order zero bits of these operations.  We start by
7020	 computing the width (position of the highest-order non-zero bit)
7021	 and the number of low-order zero bits for each value.  */
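      /* For example, if NZ0 == 0xf0 and NZ1 == 0x0c, then for PLUS we get
	 width0 == 8, width1 == 4, low0 == 4 and low1 == 2: the sum fits
	 in 9 bits and its low two bits are zero, so NONZERO is reduced
	 to 0x1fc.  */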
7022      {
7023	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7024	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7025	int width0 = floor_log2 (nz0) + 1;
7026	int width1 = floor_log2 (nz1) + 1;
7027	int low0 = floor_log2 (nz0 & -nz0);
7028	int low1 = floor_log2 (nz1 & -nz1);
7029	HOST_WIDE_INT op0_maybe_minusp
7030	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7031	HOST_WIDE_INT op1_maybe_minusp
7032	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7033	int result_width = mode_width;
7034	int result_low = 0;
7035
7036	switch (code)
7037	  {
7038	  case PLUS:
7039	    result_width = MAX (width0, width1) + 1;
7040	    result_low = MIN (low0, low1);
7041	    break;
7042	  case MINUS:
7043	    result_low = MIN (low0, low1);
7044	    break;
7045	  case MULT:
7046	    result_width = width0 + width1;
7047	    result_low = low0 + low1;
7048	    break;
7049	  case DIV:
7050	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7051	      result_width = width0;
7052	    break;
7053	  case UDIV:
7054	    result_width = width0;
7055	    break;
7056	  case MOD:
7057	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7058	      result_width = MIN (width0, width1);
7059	    result_low = MIN (low0, low1);
7060	    break;
7061	  case UMOD:
7062	    result_width = MIN (width0, width1);
7063	    result_low = MIN (low0, low1);
7064	    break;
7065	  }
7066
7067	if (result_width < mode_width)
7068	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7069
7070	if (result_low > 0)
7071	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7072      }
7073      break;
7074
7075    case ZERO_EXTRACT:
7076      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7077	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7078	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7079      break;
7080
7081    case SUBREG:
7082      /* If this is a SUBREG formed for a promoted variable that has
7083	 been zero-extended, we know that at least the high-order bits
7084	 are zero, though others might be too.  */
7085
7086      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7087	nonzero = (GET_MODE_MASK (GET_MODE (x))
7088		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7089
7090      /* If the inner mode is a single word for both the host and target
7091	 machines, we can compute this from which bits of the inner
7092	 object might be nonzero.  */
7093      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7094	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7095	      <= HOST_BITS_PER_WIDE_INT))
7096	{
7097	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7098
7099#ifndef WORD_REGISTER_OPERATIONS
7100	  /* On many CISC machines, accessing an object in a wider mode
7101	     causes the high-order bits to become undefined.  So they are
7102	     not known to be zero.  */
7103	  if (GET_MODE_SIZE (GET_MODE (x))
7104	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7105	    nonzero |= (GET_MODE_MASK (GET_MODE (x))
7106			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7107#endif
7108	}
7109      break;
7110
7111    case ASHIFTRT:
7112    case LSHIFTRT:
7113    case ASHIFT:
7114    case ROTATE:
7115      /* The nonzero bits are in two classes: any bits within MODE
7116	 that aren't in GET_MODE (x) are always significant.  The rest of the
7117	 nonzero bits are those that are significant in the operand of
7118	 the shift when shifted the appropriate number of bits.  This
7119	 shows that high-order bits are cleared by the right shift and
7120	 low-order bits by left shifts.  */
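      /* For example, if FOO has nonzero bits 0xff00 in SImode, then
	 (lshiftrt FOO (const_int 8)) has nonzero bits 0xff and
	 (ashift FOO (const_int 8)) has nonzero bits 0xff0000.  */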
7121      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7122	  && INTVAL (XEXP (x, 1)) >= 0
7123	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7124	{
7125	  enum machine_mode inner_mode = GET_MODE (x);
7126	  int width = GET_MODE_BITSIZE (inner_mode);
7127	  int count = INTVAL (XEXP (x, 1));
7128	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7129	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7130	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7131	  unsigned HOST_WIDE_INT outer = 0;
7132
7133	  if (mode_width > width)
7134	    outer = (op_nonzero & nonzero & ~ mode_mask);
7135
7136	  if (code == LSHIFTRT)
7137	    inner >>= count;
7138	  else if (code == ASHIFTRT)
7139	    {
7140	      inner >>= count;
7141
7142	      /* If the sign bit may have been nonzero before the shift, we
7143		 need to mark all the places it could have been copied to
7144		 by the shift as possibly nonzero.  */
7145	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7146		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7147	    }
7148	  else if (code == ASHIFT)
7149	    inner <<= count;
7150	  else
7151	    inner = ((inner << (count % width)
7152		      | (inner >> (width - (count % width)))) & mode_mask);
7153
7154	  nonzero &= (outer | inner);
7155	}
7156      break;
7157
7158    case FFS:
7159      /* This is at most the number of bits in the mode.  */
7160      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7161      break;
7162
7163    case IF_THEN_ELSE:
7164      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7165		  | nonzero_bits (XEXP (x, 2), mode));
7166      break;
7167    }
7168
7169  return nonzero;
7170}
7171
7172/* Return the number of bits at the high-order end of X that are known to
7173   be equal to the sign bit.  X will be used in mode MODE; if MODE is
7174   VOIDmode, X will be used in its own mode.  The returned value will always
7175   be between 1 and the number of bits in MODE.  */
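/* For example, (sign_extend:SI (reg:QI R)) has at least 25 sign bit
   copies in SImode: the 24 bits created by the extension plus the QImode
   sign bit itself.  The SImode constant -1 has a full 32.  */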
7176
7177static int
7178num_sign_bit_copies (x, mode)
7179     rtx x;
7180     enum machine_mode mode;
7181{
7182  enum rtx_code code = GET_CODE (x);
7183  int bitwidth;
7184  int num0, num1, result;
7185  unsigned HOST_WIDE_INT nonzero;
7186  rtx tem;
7187
7188  /* If we weren't given a mode, use the mode of X.  If the mode is still
7189     VOIDmode, we don't know anything.  Likewise if one of the modes is
7190     floating-point.  */
7191
7192  if (mode == VOIDmode)
7193    mode = GET_MODE (x);
7194
7195  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7196    return 1;
7197
7198  bitwidth = GET_MODE_BITSIZE (mode);
7199
7200  /* For a smaller object, just ignore the high bits. */
7201  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7202    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7203		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7204
7205#ifndef WORD_REGISTER_OPERATIONS
7206  /* If this machine does not do all register operations on the entire
7207     register and MODE is wider than the mode of X, we can say nothing
7208     at all about the high-order bits.  */
7209  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7210    return 1;
7211#endif
7212
7213  switch (code)
7214    {
7215    case REG:
7216
7217#ifdef POINTERS_EXTEND_UNSIGNED
7218      /* If pointers extend signed and this is a pointer in Pmode, say that
7219	 all the bits above ptr_mode are known to be sign bit copies.  */
7220      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7221	  && REGNO_POINTER_FLAG (REGNO (x)))
7222	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7223#endif
7224
7225      if (reg_last_set_value[REGNO (x)] != 0
7226	  && reg_last_set_mode[REGNO (x)] == mode
7227	  && (reg_n_sets[REGNO (x)] == 1
7228	      || reg_last_set_label[REGNO (x)] == label_tick)
7229	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7230	return reg_last_set_sign_bit_copies[REGNO (x)];
7231
7232      tem =  get_last_value (x);
7233      if (tem != 0)
7234	return num_sign_bit_copies (tem, mode);
7235
7236      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7237	return reg_sign_bit_copies[REGNO (x)];
7238      break;
7239
7240    case MEM:
7241#ifdef LOAD_EXTEND_OP
7242      /* Some RISC machines sign-extend all loads of smaller than a word.  */
7243      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7244	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7245#endif
7246      break;
7247
7248    case CONST_INT:
7249      /* If the constant is negative, take its 1's complement and remask.
7250	 Then see how many zero bits we have.  */
7251      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7252      if (bitwidth <= HOST_BITS_PER_WIDE_INT
7253	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7254	nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7255
7256      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7257
7258    case SUBREG:
7259      /* If this is a SUBREG for a promoted object that is sign-extended
7260	 and we are looking at it in a wider mode, we know that at least the
7261	 high-order bits are known to be sign bit copies.  */
7262
7263      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7264	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7265		    num_sign_bit_copies (SUBREG_REG (x), mode));
7266
7267      /* For a smaller object, just ignore the high bits. */
7268      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7269	{
7270	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7271	  return MAX (1, (num0
7272			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7273			     - bitwidth)));
7274	}
7275
7276#ifdef WORD_REGISTER_OPERATIONS
7277#ifdef LOAD_EXTEND_OP
7278      /* For paradoxical SUBREGs on machines where all register operations
7279	 affect the entire register, just look inside.  Note that we are
7280	 passing MODE to the recursive call, so the number of sign bit copies
7281	 will remain relative to that mode, not the inner mode.  */
7282
7283      /* This works only if loads sign extend.  Otherwise, if we get a
7284	 reload for the inner part, it may be loaded from the stack, and
7285	 then we lose all sign bit copies that existed before the store
7286	 to the stack.  */
7287
7288      if ((GET_MODE_SIZE (GET_MODE (x))
7289	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7290	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7291	return num_sign_bit_copies (SUBREG_REG (x), mode);
7292#endif
7293#endif
7294      break;
7295
7296    case SIGN_EXTRACT:
7297      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7298	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7299      break;
7300
7301    case SIGN_EXTEND:
7302      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7303	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7304
7305    case TRUNCATE:
7306      /* For a smaller object, just ignore the high bits. */
7307      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7308      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7309			      - bitwidth)));
7310
7311    case NOT:
7312      return num_sign_bit_copies (XEXP (x, 0), mode);
7313
7314    case ROTATE:       case ROTATERT:
7315      /* If we are rotating left by a number of bits less than the number
7316	 of sign bit copies, we can just subtract that amount from the
7317	 number.  */
7318      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7319	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7320	{
7321	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7322	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7323				 : bitwidth - INTVAL (XEXP (x, 1))));
7324	}
7325      break;
7326
7327    case NEG:
7328      /* In general, this subtracts one sign bit copy.  But if the value
7329	 is known to be positive, the number of sign bit copies is the
7330	 same as that of the input.  Finally, if the input has just one bit
7331	 that might be nonzero, all the bits are copies of the sign bit.  */
7332      nonzero = nonzero_bits (XEXP (x, 0), mode);
7333      if (nonzero == 1)
7334	return bitwidth;
7335
7336      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7337      if (num0 > 1
7338	  && bitwidth <= HOST_BITS_PER_WIDE_INT
7339	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7340	num0--;
7341
7342      return num0;
7343
7344    case IOR:   case AND:   case XOR:
7345    case SMIN:  case SMAX:  case UMIN:  case UMAX:
7346      /* Logical operations will preserve the number of sign-bit copies.
7347	 MIN and MAX operations always return one of the operands.  */
7348      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7349      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7350      return MIN (num0, num1);
7351
7352    case PLUS:  case MINUS:
7353      /* For addition and subtraction, we can have a 1-bit carry.  However,
7354	 if we are subtracting 1 from a positive number, there will not
7355	 be such a carry.  Furthermore, if the positive number is known to
7356	 be 0 or 1, we know the result is either -1 or 0.  */
7357
7358      if (code == PLUS && XEXP (x, 1) == constm1_rtx
7359	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
7360	{
7361	  nonzero = nonzero_bits (XEXP (x, 0), mode);
7362	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7363	    return (nonzero == 1 || nonzero == 0 ? bitwidth
7364		    : bitwidth - floor_log2 (nonzero) - 1);
7365	}
7366
7367      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7368      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7369      return MAX (1, MIN (num0, num1) - 1);
7370
7371    case MULT:
7372      /* The number of bits of the product is the sum of the number of
7373	 bits of both terms.  However, unless one of the terms is known
7374	 to be positive, we must allow for an additional bit since negating
7375	 a negative number can remove one sign bit copy.  */
7376
7377      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7378      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7379
7380      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7381      if (result > 0
7382	  && bitwidth <= HOST_BITS_PER_WIDE_INT
7383	  && ((nonzero_bits (XEXP (x, 0), mode)
7384	       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7385	  && (nonzero_bits (XEXP (x, 1), mode)
7386	      & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
7387	result--;
7388
7389      return MAX (1, result);
7390
7391    case UDIV:
7392      /* The result must be <= the first operand.  */
7393      return num_sign_bit_copies (XEXP (x, 0), mode);
7394
7395    case UMOD:
7396      /* The result must be <= the second operand.  */
7397      return num_sign_bit_copies (XEXP (x, 1), mode);
7398
7399    case DIV:
7400      /* Similar to unsigned division, except that we have to worry about
7401	 the case where the divisor is negative, in which case we have
7402	 to add 1.  */
7403      result = num_sign_bit_copies (XEXP (x, 0), mode);
7404      if (result > 1
7405	  && bitwidth <= HOST_BITS_PER_WIDE_INT
7406	  && (nonzero_bits (XEXP (x, 1), mode)
7407	      & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7408	result--;
7409
7410      return result;
7411
7412    case MOD:
7413      result = num_sign_bit_copies (XEXP (x, 1), mode);
7414      if (result > 1
7415	  && bitwidth <= HOST_BITS_PER_WIDE_INT
7416	  && (nonzero_bits (XEXP (x, 1), mode)
7417	      & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7418	result--;
7419
7420      return result;
7421
7422    case ASHIFTRT:
7423      /* Shifts by a constant add to the number of bits equal to the
7424	 sign bit.  */
7425      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7426      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7427	  && INTVAL (XEXP (x, 1)) > 0)
7428	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7429
7430      return num0;
7431
7432    case ASHIFT:
7433      /* Left shifts destroy copies.  */
7434      if (GET_CODE (XEXP (x, 1)) != CONST_INT
7435	  || INTVAL (XEXP (x, 1)) < 0
7436	  || INTVAL (XEXP (x, 1)) >= bitwidth)
7437	return 1;
7438
7439      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7440      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7441
7442    case IF_THEN_ELSE:
7443      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7444      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7445      return MIN (num0, num1);
7446
7447#if STORE_FLAG_VALUE == -1
7448    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
7449    case GEU: case GTU: case LEU: case LTU:
7450      return bitwidth;
7451#endif
7452    }
7453
7454  /* If we haven't been able to figure it out by one of the above rules,
7455     see if some of the high-order bits are known to be zero.  If so,
7456     count those bits and return one less than that amount.  If we can't
7457     safely compute the mask for this mode, always return 1.  */
7458
7459  if (bitwidth > HOST_BITS_PER_WIDE_INT)
7460    return 1;
7461
7462  nonzero = nonzero_bits (x, mode);
7463  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7464	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7465}
7466
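/* A standalone restatement of the CONST_INT rule above for a plain 8-bit
   constant; the name is illustrative, not a combine.c routine.  A negative
   value is complemented first, so both signs reduce to counting high-order
   zero bits, i.e. BITWIDTH - floor_log2 (value) - 1.  */

static int
sketch_const_sign_bit_copies (value)
     int value;
{
  int bitwidth = 8;
  unsigned int nonzero = value & 0xff;
  int copies = bitwidth;

  if (nonzero & 0x80)		/* negative: complement and remask */
    nonzero = (~ nonzero) & 0xff;

  while (nonzero != 0)		/* count the high-order zero bits */
    nonzero >>= 1, copies--;

  return copies;		/* e.g. 1 -> 7, -1 -> 8, -4 -> 6 */
}
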
7467/* Return the number of "extended" bits there are in X, when interpreted
7468   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
7469   unsigned quantities, this is the number of high-order zero bits.
7470   For signed quantities, this is the number of copies of the sign bit
7471   minus 1.  In both cases, this function returns the number of "spare"
7472   bits.  For example, if two quantities for which this function returns
7473   at least 1 are added, the addition is known not to overflow.
7474
7475   This function will always return 0 unless called during combine, which
7476   implies that it must be called from a define_split.  */
7477
7478int
7479extended_count (x, mode, unsignedp)
7480     rtx x;
7481     enum machine_mode mode;
7482     int unsignedp;
7483{
7484  if (nonzero_sign_valid == 0)
7485    return 0;
7486
7487  return (unsignedp
7488	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7489	     && (GET_MODE_BITSIZE (mode) - 1
7490		 - floor_log2 (nonzero_bits (x, mode))))
7491	  : num_sign_bit_copies (x, mode) - 1);
7492}
7493
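/* A brute-force check, over all signed 8-bit values with at least one
   "spare" bit (two or more sign bit copies, hence -64 .. 63), of the
   claim above that adding two such quantities cannot overflow.  The name
   is illustrative only.  Returns 1 if the claim holds.  */

static int
sketch_spare_bit_claim ()
{
  int a, b;

  for (a = -64; a <= 63; a++)
    for (b = -64; b <= 63; b++)
      if (a + b < -128 || a + b > 127)
	return 0;		/* would have overflowed QImode */

  return 1;			/* sums stay within -128 .. 126 */
}
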
7494/* This function is called from `simplify_shift_const' to merge two
7495   outer operations.  Specifically, we have already found that we need
7496   to perform operation *POP0 with constant *PCONST0 at the outermost
7497   position.  We would now like to also perform OP1 with constant CONST1
7498   (with *POP0 being done last).
7499
7500   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7501   the resulting operation.  *PCOMP_P is set to 1 if we would need to
7502   complement the innermost operand, otherwise it is unchanged.
7503
7504   MODE is the mode in which the operation will be done.  No bits outside
7505   the width of this mode matter.  It is assumed that the width of this mode
7506   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7507
7508   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
7509   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
7510   result is simply *PCONST0.
7511
7512   If the resulting operation cannot be expressed as one operation, we
7513   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
7514
7515static int
7516merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7517     enum rtx_code *pop0;
7518     HOST_WIDE_INT *pconst0;
7519     enum rtx_code op1;
7520     HOST_WIDE_INT const1;
7521     enum machine_mode mode;
7522     int *pcomp_p;
7523{
7524  enum rtx_code op0 = *pop0;
7525  HOST_WIDE_INT const0 = *pconst0;
7526  int width = GET_MODE_BITSIZE (mode);
7527
7528  const0 &= GET_MODE_MASK (mode);
7529  const1 &= GET_MODE_MASK (mode);
7530
7531  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
7532  if (op0 == AND)
7533    const1 &= const0;
7534
7535  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
7536     if OP0 is SET.  */
7537
7538  if (op1 == NIL || op0 == SET)
7539    return 1;
7540
7541  else if (op0 == NIL)
7542    op0 = op1, const0 = const1;
7543
7544  else if (op0 == op1)
7545    {
7546      switch (op0)
7547	{
7548	case AND:
7549	  const0 &= const1;
7550	  break;
7551	case IOR:
7552	  const0 |= const1;
7553	  break;
7554	case XOR:
7555	  const0 ^= const1;
7556	  break;
7557	case PLUS:
7558	  const0 += const1;
7559	  break;
7560	case NEG:
7561	  op0 = NIL;
7562	  break;
7563	}
7564    }
7565
7566  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
7567  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7568    return 0;
7569
7570  /* If the two constants aren't the same, we can't do anything.  The
7571     remaining six cases can all be done.  */
7572  else if (const0 != const1)
7573    return 0;
7574
7575  else
7576    switch (op0)
7577      {
7578      case IOR:
7579	if (op1 == AND)
7580	  /* (a & b) | b == b */
7581	  op0 = SET;
7582	else /* op1 == XOR */
7583	  /* (a ^ b) | b == a | b */
7584	  ;
7585	break;
7586
7587      case XOR:
7588	if (op1 == AND)
7589	  /* (a & b) ^ b == (~a) & b */
7590	  op0 = AND, *pcomp_p = 1;
7591	else /* op1 == IOR */
7592	  /* (a | b) ^ b == a & ~b */
7593	  op0 = AND, *pconst0 = ~ const0;
7594	break;
7595
7596      case AND:
7597	if (op1 == IOR)
7598	  /* (a | b) & b == b */
7599	  op0 = SET;
7600	else /* op1 == XOR */
7601	  /* (a ^ b) & b == (~a) & b */
7602	  *pcomp_p = 1;
7603	break;
7604      }
7605
7606  /* Check for NO-OP cases.  */
7607  const0 &= GET_MODE_MASK (mode);
7608  if (const0 == 0
7609      && (op0 == IOR || op0 == XOR || op0 == PLUS))
7610    op0 = NIL;
7611  else if (const0 == 0 && op0 == AND)
7612    op0 = SET;
7613  else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7614    op0 = NIL;
7615
7616  /* If this would be an entire word for the target, but is not for
7617     the host, then sign-extend on the host so that the number will look
7618     the same way on the host that it would on the target.
7619
7620     For example, when building a 64 bit alpha hosted 32 bit sparc
7621     targeted compiler, then we want the 32 bit unsigned value -1 to be
7622     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7623     The latter confuses the sparc backend.  */
7624
7625  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7626      && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
7627    const0 |= ((HOST_WIDE_INT) (-1) << width);
7628
7629  *pop0 = op0;
7630  *pconst0 = const0;
7631
7632  return 1;
7633}
7634
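/* A brute-force check of the six bitwise identities the switch above
   relies on, over all pairs of 4-bit values; the name is illustrative,
   not part of combine.  Returns 1 if every identity holds.  */

static int
sketch_merge_identities ()
{
  unsigned int a, b;

  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      if (((a & b) | b) != b			/* AND then IOR */
	  || ((a ^ b) | b) != (a | b)		/* XOR then IOR */
	  || ((a & b) ^ b) != (~a & b)		/* AND then XOR */
	  || ((a | b) ^ b) != (a & ~b)		/* IOR then XOR */
	  || ((a | b) & b) != b			/* IOR then AND */
	  || ((a ^ b) & b) != (~a & b))		/* XOR then AND */
	return 0;

  return 1;
}
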
7635/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
7636   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
7637   that we started with.
7638
7639   The shift is normally computed in the widest mode we find in VAROP, as
7640   long as it isn't a different number of words than RESULT_MODE.  Exceptions
7641   are ASHIFTRT and ROTATE, which are always done in their original mode,  */
7642
7643static rtx
7644simplify_shift_const (x, code, result_mode, varop, count)
7645     rtx x;
7646     enum rtx_code code;
7647     enum machine_mode result_mode;
7648     rtx varop;
7649     int count;
7650{
7651  enum rtx_code orig_code = code;
7652  int orig_count = count;
7653  enum machine_mode mode = result_mode;
7654  enum machine_mode shift_mode, tmode;
7655  int mode_words
7656    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7657  /* We form (outer_op (code varop count) (outer_const)).  */
7658  enum rtx_code outer_op = NIL;
7659  HOST_WIDE_INT outer_const = 0;
7660  rtx const_rtx;
7661  int complement_p = 0;
7662  rtx new;
7663
7664  /* If we were given an invalid count, don't do anything except exactly
7665     what was requested.  */
7666
7667  if (count < 0 || count > GET_MODE_BITSIZE (mode))
7668    {
7669      if (x)
7670	return x;
7671
7672      return gen_rtx (code, mode, varop, GEN_INT (count));
7673    }
7674
7675  /* Unless one of the branches of the `if' in this loop does a `continue',
7676     we will `break' the loop after the `if'.  */
7677
7678  while (count != 0)
7679    {
7680      /* If we have an operand of (clobber (const_int 0)), just return that
7681	 value.  */
7682      if (GET_CODE (varop) == CLOBBER)
7683	return varop;
7684
7685      /* If we discovered we had to complement VAROP, leave.  Making a NOT
7686	 here would cause an infinite loop.  */
7687      if (complement_p)
7688	break;
7689
7690      /* Convert ROTATERT to ROTATE.  */
7691      if (code == ROTATERT)
7692	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7693
7694      /* We need to determine what mode we will do the shift in.  If the
7695	 shift is a right shift or a ROTATE, we must always do it in the mode
7696	 it was originally done in.  Otherwise, we can do it in MODE, the
7697	 widest mode encountered. */
7698      shift_mode
7699	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
7700	   ? result_mode : mode);
7701
7702      /* Handle cases where the count is greater than the size of the mode
7703	 minus 1.  For ASHIFT, use the size minus one as the count (this can
7704	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
7705	 take the count modulo the size.  For other shifts, the result is
7706	 zero.
7707
7708	 Since these shifts are being produced by the compiler by combining
7709	 multiple operations, each of which is defined, we know what the
7710	 result is supposed to be.  */
7711
7712      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7713	{
7714	  if (code == ASHIFTRT)
7715	    count = GET_MODE_BITSIZE (shift_mode) - 1;
7716	  else if (code == ROTATE || code == ROTATERT)
7717	    count %= GET_MODE_BITSIZE (shift_mode);
7718	  else
7719	    {
7720	      /* We can't simply return zero because there may be an
7721		 outer op.  */
7722	      varop = const0_rtx;
7723	      count = 0;
7724	      break;
7725	    }
7726	}
7727
7728      /* Negative counts are invalid and should not have been made (a
7729	 programmer-specified negative count should have been handled
7730	 above). */
7731      else if (count < 0)
7732	abort ();
7733
7734      /* An arithmetic right shift of a quantity known to be -1 or 0
7735	 is a no-op.  */
7736      if (code == ASHIFTRT
7737	  && (num_sign_bit_copies (varop, shift_mode)
7738	      == GET_MODE_BITSIZE (shift_mode)))
7739	{
7740	  count = 0;
7741	  break;
7742	}
7743
7744      /* If we are doing an arithmetic right shift and discarding all but
7745	 the sign bit copies, this is equivalent to doing a shift by the
7746	 bitsize minus one.  Convert it into that shift because it will often
7747	 allow other simplifications.  */
7748
7749      if (code == ASHIFTRT
7750	  && (count + num_sign_bit_copies (varop, shift_mode)
7751	      >= GET_MODE_BITSIZE (shift_mode)))
7752	count = GET_MODE_BITSIZE (shift_mode) - 1;
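      /* (E.g. in an 8-bit mode, a value with 5 sign-bit copies shifted
	 right arithmetically by 4 is all sign-bit copies, just as if it
	 had been shifted by 7.)  */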
7753
7754      /* We simplify the tests below and elsewhere by converting
7755	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7756	 `make_compound_operation' will convert it to an ASHIFTRT for
7757	 those machines (such as VAX) that don't have an LSHIFTRT.  */
7758      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7759	  && code == ASHIFTRT
7760	  && ((nonzero_bits (varop, shift_mode)
7761	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7762	      == 0))
7763	code = LSHIFTRT;
7764
7765      switch (GET_CODE (varop))
7766	{
7767	case SIGN_EXTEND:
7768	case ZERO_EXTEND:
7769	case SIGN_EXTRACT:
7770	case ZERO_EXTRACT:
7771	  new = expand_compound_operation (varop);
7772	  if (new != varop)
7773	    {
7774	      varop = new;
7775	      continue;
7776	    }
7777	  break;
7778
7779	case MEM:
7780	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7781	     minus the width of a smaller mode, we can do this with a
7782	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
7783	  if ((code == ASHIFTRT || code == LSHIFTRT)
7784	      && ! mode_dependent_address_p (XEXP (varop, 0))
7785	      && ! MEM_VOLATILE_P (varop)
7786	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7787					 MODE_INT, 1)) != BLKmode)
7788	    {
7789	      if (BYTES_BIG_ENDIAN)
7790		new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7791	      else
7792		new = gen_rtx (MEM, tmode,
7793			       plus_constant (XEXP (varop, 0),
7794					      count / BITS_PER_UNIT));
7795	      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7796	      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7797	      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7798	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7799				       : ZERO_EXTEND, mode, new);
7800	      count = 0;
7801	      continue;
7802	    }
7803	  break;
7804
7805	case USE:
7806	  /* Similar to the case above, except that we can only do this if
7807	     the resulting mode is the same as that of the underlying
7808	     MEM and adjust the address depending on the *bits* endianness
7809	     because of the way that bit-field extract insns are defined.  */
7810	  if ((code == ASHIFTRT || code == LSHIFTRT)
7811	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7812					 MODE_INT, 1)) != BLKmode
7813	      && tmode == GET_MODE (XEXP (varop, 0)))
7814	    {
7815	      if (BITS_BIG_ENDIAN)
7816		new = XEXP (varop, 0);
7817	      else
7818		{
7819		  new = copy_rtx (XEXP (varop, 0));
7820		  SUBST (XEXP (new, 0),
7821			 plus_constant (XEXP (new, 0),
7822					count / BITS_PER_UNIT));
7823		}
7824
7825	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7826				       : ZERO_EXTEND, mode, new);
7827	      count = 0;
7828	      continue;
7829	    }
7830	  break;
7831
7832	case SUBREG:
7833	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
7834	     the same number of words as what we've seen so far.  Then store
7835	     the widest mode in MODE.  */
7836	  if (subreg_lowpart_p (varop)
7837	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7838		  > GET_MODE_SIZE (GET_MODE (varop)))
7839	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7840		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7841		  == mode_words))
7842	    {
7843	      varop = SUBREG_REG (varop);
7844	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7845		mode = GET_MODE (varop);
7846	      continue;
7847	    }
7848	  break;
7849
7850	case MULT:
7851	  /* Some machines use MULT instead of ASHIFT because MULT
7852	     is cheaper.  But it is still better on those machines to
7853	     merge two shifts into one.  */
7854	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7855	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7856	    {
7857	      varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7858				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7859	      continue;
7860	    }
7861	  break;
7862
7863	case UDIV:
7864	  /* Similar, for when divides are cheaper.  */
7865	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7866	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7867	    {
7868	      varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7869				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7870	      continue;
7871	    }
7872	  break;
7873
7874	case ASHIFTRT:
7875	  /* If we are extracting just the sign bit of an arithmetic right
7876	     shift, that shift is not needed.  */
7877	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7878	    {
7879	      varop = XEXP (varop, 0);
7880	      continue;
7881	    }
7882
7883	  /* ... fall through ... */
7884
7885	case LSHIFTRT:
7886	case ASHIFT:
7887	case ROTATE:
7888	  /* Here we have two nested shifts.  The result is usually the
7889	     AND of a new shift with a mask.  We compute the result below.  */
7890	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7891	      && INTVAL (XEXP (varop, 1)) >= 0
7892	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7893	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7894	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7895	    {
7896	      enum rtx_code first_code = GET_CODE (varop);
7897	      int first_count = INTVAL (XEXP (varop, 1));
7898	      unsigned HOST_WIDE_INT mask;
7899	      rtx mask_rtx;
7900
7901	      /* We have one common special case.  We can't do any merging if
7902		 the inner code is an ASHIFTRT of a smaller mode.  However, if
7903		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7904		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7905		 we can convert it to
7906		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7907		 This simplifies certain SIGN_EXTEND operations.  */
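	      /* Concretely, with M1 = SImode, M2 = QImode, and so C2 = 24,
		 (ashift:SI (subreg:SI (ashiftrt:QI FOO C1) 0) 24) becomes
		 (ashiftrt:SI (ashift:SI (and:SI (subreg:SI FOO 0) C3) 24)
		 C1).  */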
7908	      if (code == ASHIFT && first_code == ASHIFTRT
7909		  && (GET_MODE_BITSIZE (result_mode)
7910		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7911		{
7912		  /* C3 has the low-order C1 bits zero.  */
7913
7914		  mask = (GET_MODE_MASK (mode)
7915			  & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7916
7917		  varop = simplify_and_const_int (NULL_RTX, result_mode,
7918						  XEXP (varop, 0), mask);
7919		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7920						varop, count);
7921		  count = first_count;
7922		  code = ASHIFTRT;
7923		  continue;
7924		}
7925
7926	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7927		 than C1 high-order bits equal to the sign bit, we can convert
7928		 this to either an ASHIFT or a ASHIFTRT depending on the
7929		 two counts.
7930
7931		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
7932
7933	      if (code == ASHIFTRT && first_code == ASHIFT
7934		  && GET_MODE (varop) == shift_mode
7935		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7936		      > first_count))
7937		{
7938		  count -= first_count;
7939		  if (count < 0)
7940		    count = - count, code = ASHIFT;
7941		  varop = XEXP (varop, 0);
7942		  continue;
7943		}
7944
7945	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
7946		 we can only do this if FIRST_CODE is also ASHIFTRT.
7947
7948		 We can't do the case when CODE is ROTATE and FIRST_CODE is
7949		 ASHIFTRT.
7950
7951		 If the mode of this shift is not the mode of the outer shift,
7952		 we can't do this if either shift is a right shift or ROTATE.
7953
7954		 Finally, we can't do any of these if the mode is too wide
7955		 unless the codes are the same.
7956
7957		 Handle the case where the shift codes are the same
7958		 first.  */
7959
7960	      if (code == first_code)
7961		{
7962		  if (GET_MODE (varop) != result_mode
7963		      && (code == ASHIFTRT || code == LSHIFTRT
7964			  || code == ROTATE))
7965		    break;
7966
7967		  count += first_count;
7968		  varop = XEXP (varop, 0);
7969		  continue;
7970		}
7971
7972	      if (code == ASHIFTRT
7973		  || (code == ROTATE && first_code == ASHIFTRT)
7974		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7975		  || (GET_MODE (varop) != result_mode
7976		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
7977			  || first_code == ROTATE
7978			  || code == ROTATE)))
7979		break;
7980
7981	      /* To compute the mask to apply after the shift, shift the
7982		 nonzero bits of the inner shift the same way the
7983		 outer shift will.  */
7984
7985	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7986
7987	      mask_rtx
7988		= simplify_binary_operation (code, result_mode, mask_rtx,
7989					     GEN_INT (count));
7990
7991	      /* Give up if we can't compute an outer operation to use.  */
7992	      if (mask_rtx == 0
7993		  || GET_CODE (mask_rtx) != CONST_INT
7994		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
7995					INTVAL (mask_rtx),
7996					result_mode, &complement_p))
7997		break;
7998
7999	      /* If the shifts are in the same direction, we add the
8000		 counts.  Otherwise, we subtract them.  */
8001	      if ((code == ASHIFTRT || code == LSHIFTRT)
8002		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8003		count += first_count;
8004	      else
8005		count -= first_count;
8006
8007	      /* If COUNT is positive, the new shift is usually CODE,
8008		 except for the two exceptions below, in which case it is
8009		 FIRST_CODE.  If the count is negative, FIRST_CODE should
8010		 always be used.  */
8011	      if (count > 0
8012		  && ((first_code == ROTATE && code == ASHIFT)
8013		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
8014		code = first_code;
8015	      else if (count < 0)
8016		code = first_code, count = - count;
8017
8018	      varop = XEXP (varop, 0);
8019	      continue;
8020	    }
8021
8022	  /* If we have (A << B << C) for any shift, we can convert this to
8023	     (A << C << B).  This wins if A is a constant.  Only try this if
8024	     B is not a constant.  */
8025
8026	  else if (GET_CODE (varop) == code
8027		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
8028		   && 0 != (new
8029			    = simplify_binary_operation (code, mode,
8030							 XEXP (varop, 0),
8031							 GEN_INT (count))))
8032	    {
8033	      varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8034	      count = 0;
8035	      continue;
8036	    }
8037	  break;
8038
8039	case NOT:
8040	  /* Make this fit the case below.  */
8041	  varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8042				   GEN_INT (GET_MODE_MASK (mode)));
8043	  continue;
8044
8045	case IOR:
8046	case AND:
8047	case XOR:
8048	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8049	     with C the size of VAROP - 1 and the shift is logical if
8050	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8051	     we have an (le X 0) operation.   If we have an arithmetic shift
8052	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
8053	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
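	  /* (With C the sign-bit position, the IOR's sign bit is set
	     exactly when X - 1 or X is negative, i.e. when X <= 0, and
	     the shift extracts just that bit.)  */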
8054
8055	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8056	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8057	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8058	      && (code == LSHIFTRT || code == ASHIFTRT)
8059	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8060	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8061	    {
8062	      count = 0;
8063	      varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8064				       const0_rtx);
8065
8066	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8067		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8068
8069	      continue;
8070	    }
8071
8072	  /* If we have (shift (logical)), move the logical to the outside
8073	     to allow it to possibly combine with another logical and the
8074	     shift to combine with another shift.  This also canonicalizes to
8075	     what a ZERO_EXTRACT looks like.  Also, some machines have
8076	     (and (shift)) insns.  */
8077
8078	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8079	      && (new = simplify_binary_operation (code, result_mode,
8080						   XEXP (varop, 1),
8081						   GEN_INT (count))) != 0
8082	      && GET_CODE(new) == CONST_INT
8083	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8084				  INTVAL (new), result_mode, &complement_p))
8085	    {
8086	      varop = XEXP (varop, 0);
8087	      continue;
8088	    }
8089
8090	  /* If we can't do that, try to simplify the shift in each arm of the
8091	     logical expression, make a new logical expression, and apply
8092	     the inverse distributive law.  */
8093	  {
8094	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8095					    XEXP (varop, 0), count);
8096	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8097					    XEXP (varop, 1), count);
8098
8099	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8100	    varop = apply_distributive_law (varop);
8101
8102	    count = 0;
8103	  }
8104	  break;
8105
8106	case EQ:
8107	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8108	     says that the sign bit can be tested, FOO has mode MODE, C is
8109	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8110	     that may be nonzero.  */
8111	  if (code == LSHIFTRT
8112	      && XEXP (varop, 1) == const0_rtx
8113	      && GET_MODE (XEXP (varop, 0)) == result_mode
8114	      && count == GET_MODE_BITSIZE (result_mode) - 1
8115	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8116	      && ((STORE_FLAG_VALUE
8117		   & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8118	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8119	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8120				  (HOST_WIDE_INT) 1, result_mode,
8121				  &complement_p))
8122	    {
8123	      varop = XEXP (varop, 0);
8124	      count = 0;
8125	      continue;
8126	    }
8127	  break;
8128
8129	case NEG:
8130	  /* (lshiftrt (neg A) C), where A is either 0 or 1 and C is one less
8131	     than the number of bits in the mode, is equivalent to A.  */
8132	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8133	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8134	    {
8135	      varop = XEXP (varop, 0);
8136	      count = 0;
8137	      continue;
8138	    }
8139
8140	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
8141	     NEG outside to allow shifts to combine.  */
8142	  if (code == ASHIFT
8143	      && merge_outer_ops (&outer_op, &outer_const, NEG,
8144				  (HOST_WIDE_INT) 0, result_mode,
8145				  &complement_p))
8146	    {
8147	      varop = XEXP (varop, 0);
8148	      continue;
8149	    }
8150	  break;
8151
8152	case PLUS:
8153	  /* (lshiftrt (plus A -1) C), where A is either 0 or 1 and C
8154	     is one less than the number of bits in the mode, is
8155	     equivalent to (xor A 1).  */
8156	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8157	      && XEXP (varop, 1) == constm1_rtx
8158	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8159	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8160				  (HOST_WIDE_INT) 1, result_mode,
8161				  &complement_p))
8162	    {
8163	      count = 0;
8164	      varop = XEXP (varop, 0);
8165	      continue;
8166	    }
8167
8168	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8169	     that might be nonzero in BAR are those being shifted out and those
8170	     bits are known zero in FOO, we can replace the PLUS with FOO.
8171	     Similarly in the other operand order.  This code occurs when
8172	     we are computing the size of a variable-size array.  */
8173
8174	  if ((code == ASHIFTRT || code == LSHIFTRT)
8175	      && count < HOST_BITS_PER_WIDE_INT
8176	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8177	      && (nonzero_bits (XEXP (varop, 1), result_mode)
8178		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8179	    {
8180	      varop = XEXP (varop, 0);
8181	      continue;
8182	    }
8183	  else if ((code == ASHIFTRT || code == LSHIFTRT)
8184		   && count < HOST_BITS_PER_WIDE_INT
8185		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8186		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8187			    >> count)
8188		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8189			    & nonzero_bits (XEXP (varop, 1),
8190						 result_mode)))
8191	    {
8192	      varop = XEXP (varop, 1);
8193	      continue;
8194	    }
8195
8196	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
8197	  if (code == ASHIFT
8198	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
8199	      && (new = simplify_binary_operation (ASHIFT, result_mode,
8200						   XEXP (varop, 1),
8201						   GEN_INT (count))) != 0
8202	      && GET_CODE(new) == CONST_INT
8203	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
8204				  INTVAL (new), result_mode, &complement_p))
8205	    {
8206	      varop = XEXP (varop, 0);
8207	      continue;
8208	    }
8209	  break;
8210
8211	case MINUS:
8212	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8213	     with C the size of VAROP - 1 and the shift is logical if
8214	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8215	     we have a (gt X 0) operation.  If the shift is arithmetic with
8216	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8217	     we have a (neg (gt X 0)) operation.  */
8218
8219	  if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8220	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8221	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8222	      && (code == LSHIFTRT || code == ASHIFTRT)
8223	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8224	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8225	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8226	    {
8227	      count = 0;
8228	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8229				       const0_rtx);
8230
8231	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8232		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8233
8234	      continue;
8235	    }
8236	  break;
8237	}
8238
8239      break;
8240    }
8241
8242  /* We need to determine what mode to do the shift in.  If the shift is
8243     a right shift or ROTATE, we must always do it in the mode it was
8244     originally done in.  Otherwise, we can do it in MODE, the widest mode
8245     encountered.  The code we care about is that of the shift that will
8246     actually be done, not the shift that was originally requested.  */
8247  shift_mode
8248    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8249       ? result_mode : mode);
8250
8251  /* We have now finished analyzing the shift.  The result should be
8252     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
8253     OUTER_OP is non-NIL, it is an operation that needs to be applied
8254     to the result of the shift.  OUTER_CONST is the relevant constant,
8255     but we must turn off all bits turned off in the shift.
8256
8257     If we were passed a value for X, see if we can use any pieces of
8258     it.  If not, make new rtx.  */
8259
8260  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8261      && GET_CODE (XEXP (x, 1)) == CONST_INT
8262      && INTVAL (XEXP (x, 1)) == count)
8263    const_rtx = XEXP (x, 1);
8264  else
8265    const_rtx = GEN_INT (count);
8266
8267  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8268      && GET_MODE (XEXP (x, 0)) == shift_mode
8269      && SUBREG_REG (XEXP (x, 0)) == varop)
8270    varop = XEXP (x, 0);
8271  else if (GET_MODE (varop) != shift_mode)
8272    varop = gen_lowpart_for_combine (shift_mode, varop);
8273
8274  /* If we can't make the SUBREG, try to return what we were given. */
8275  if (GET_CODE (varop) == CLOBBER)
8276    return x ? x : varop;
8277
8278  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8279  if (new != 0)
8280    x = new;
8281  else
8282    {
8283      if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8284	x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8285
8286      SUBST (XEXP (x, 0), varop);
8287      SUBST (XEXP (x, 1), const_rtx);
8288    }
8289
8290  /* If we have an outer operation and we just made a shift, it is
8291     possible that we could have simplified the shift were it not
8292     for the outer operation.  So try to do the simplification
8293     recursively.  */
8294
8295  if (outer_op != NIL && GET_CODE (x) == code
8296      && GET_CODE (XEXP (x, 1)) == CONST_INT)
8297    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8298			      INTVAL (XEXP (x, 1)));
8299
8300  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
8301     turn off all the bits that the shift would have turned off.  */
8302  if (orig_code == LSHIFTRT && result_mode != shift_mode)
8303    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8304				GET_MODE_MASK (result_mode) >> orig_count);
8305
8306  /* Do the remainder of the processing in RESULT_MODE.  */
8307  x = gen_lowpart_for_combine (result_mode, x);
8308
8309  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8310     operation.  */
8311  if (complement_p)
8312    x = gen_unary (NOT, result_mode, result_mode, x);
8313
8314  if (outer_op != NIL)
8315    {
8316      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8317	{
8318	  int width = GET_MODE_BITSIZE (result_mode);
8319
8320	  outer_const &= GET_MODE_MASK (result_mode);
8321
8322	  /* If this would be an entire word for the target, but is not for
8323	     the host, then sign-extend on the host so that the number will
8324	     look the same way on the host that it would on the target.
8325
8326	     For example, when building a 64 bit alpha hosted 32 bit sparc
8327	     targeted compiler, then we want the 32 bit unsigned value -1 to be
8328	     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8329	     The latter confuses the sparc backend.  */
8330
8331	  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8332	      && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8333	    outer_const |= ((HOST_WIDE_INT) (-1) << width);
8334	}
8335
8336      if (outer_op == AND)
8337	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8338      else if (outer_op == SET)
8339	/* This means that we have determined that the result is
8340	   equivalent to a constant.  This should be rare.  */
8341	x = GEN_INT (outer_const);
8342      else if (GET_RTX_CLASS (outer_op) == '1')
8343	x = gen_unary (outer_op, result_mode, result_mode, x);
8344      else
8345	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8346    }
8347
8348  return x;
8349}
8350
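/* A standalone instance, assuming an 8-bit mode, of the nested-shift
   handling above for the opposite-direction case: the counts of
   (lshiftrt (ashift X C) C) cancel, but each step discards bits, so the
   pair collapses to the AND that merge_outer_ops records.  The name is
   illustrative only.  */

static unsigned int
sketch_shift_pair_as_and (x, c)
     unsigned int x;
     int c;
{
  unsigned int mode_mask = 0xff;
  unsigned int two_shifts = ((x << c) & mode_mask) >> c;
  unsigned int one_and = x & (mode_mask >> c);

  /* The two forms agree for 0 <= C < 8; ~0 flags a mismatch.  */
  return two_shifts == one_and ? one_and : ~ (unsigned int) 0;
}
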
8351/* Like recog, but we receive the address of a pointer to a new pattern.
8352   We try to match the rtx that the pointer points to.
8353   If that fails, we may try to modify or replace the pattern,
8354   storing the replacement into the same pointer object.
8355
8356   Modifications include deletion or addition of CLOBBERs.
8357
8358   PNOTES is a pointer to a location where any REG_UNUSED notes added for
8359   the CLOBBERs are placed.
8360
8361   PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8362   we had to add.
8363
8364   The value is the final insn code from the pattern ultimately matched,
8365   or -1.  */
8366
8367static int
8368recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
8369     rtx *pnewpat;
8370     rtx insn;
8371     rtx *pnotes;
8372     int *padded_scratches;
8373{
8374  register rtx pat = *pnewpat;
8375  int insn_code_number;
8376  int num_clobbers_to_add = 0;
8377  int i;
8378  rtx notes = 0;
8379
8380  *padded_scratches = 0;
8381
8382  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8383     we use to indicate that something didn't match.  If we find such a
8384     thing, force rejection.  */
8385  if (GET_CODE (pat) == PARALLEL)
8386    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8387      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8388	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8389	return -1;
8390
8391  /* Is the result of combination a valid instruction?  */
8392  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8393
8394  /* If it isn't, there is the possibility that we previously had an insn
8395     that clobbered some register as a side effect, but the combined
8396     insn doesn't need to do that.  So try once more without the clobbers
8397     unless this represents an ASM insn.  */
8398
8399  if (insn_code_number < 0 && ! check_asm_operands (pat)
8400      && GET_CODE (pat) == PARALLEL)
8401    {
8402      int pos;
8403
8404      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8405	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8406	  {
8407	    if (i != pos)
8408	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8409	    pos++;
8410	  }
8411
8412      SUBST_INT (XVECLEN (pat, 0), pos);
8413
8414      if (pos == 1)
8415	pat = XVECEXP (pat, 0, 0);
8416
8417      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8418    }
8419
8420  /* If we had any clobbers to add, make a new pattern that contains
8421     them.  Then check to make sure that all of them are dead.  */
8422  if (num_clobbers_to_add)
8423    {
8424      rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8425			    gen_rtvec (GET_CODE (pat) == PARALLEL
8426				       ? XVECLEN (pat, 0) + num_clobbers_to_add
8427				       : num_clobbers_to_add + 1));
8428
8429      if (GET_CODE (pat) == PARALLEL)
8430	for (i = 0; i < XVECLEN (pat, 0); i++)
8431	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8432      else
8433	XVECEXP (newpat, 0, 0) = pat;
8434
8435      add_clobbers (newpat, insn_code_number);
8436
8437      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8438	   i < XVECLEN (newpat, 0); i++)
8439	{
8440	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8441	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8442	    return -1;
8443	  else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8444	    (*padded_scratches)++;
8445	  notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8446			   XEXP (XVECEXP (newpat, 0, i), 0), notes);
8447	}
8448      pat = newpat;
8449    }
8450
8451  *pnewpat = pat;
8452  *pnotes = notes;
8453
8454  return insn_code_number;
8455}
8456
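/* The clobber-stripping loop above, restated for a plain int array; the
   names are illustrative, not combine internals.  Elements rejected by
   KEEP are squeezed out in place, preserving order, just as the SUBST
   walk compacts XVECEXP (pat, 0, i).  Returns the new length.  */

static int
sketch_strip_in_place (vec, len, keep)
     int *vec;
     int len;
     int (*keep) ();
{
  int i, pos;

  for (pos = 0, i = 0; i < len; i++)
    if ((*keep) (vec[i]))
      {
	if (i != pos)
	  vec[pos] = vec[i];
	pos++;
      }

  return pos;
}
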
8457/* Like gen_lowpart but for use by combine.  In combine it is not possible
8458   to create any new pseudoregs.  However, it is safe to create
8459   invalid memory addresses, because combine will try to recognize
8460   them and all they will do is make the combine attempt fail.
8461
8462   If for some reason this cannot do its job, an rtx
8463   (clobber (const_int 0)) is returned.
8464   An insn containing that will not be recognized.  */
8465
8466#undef gen_lowpart
8467
8468static rtx
8469gen_lowpart_for_combine (mode, x)
8470     enum machine_mode mode;
8471     register rtx x;
8472{
8473  rtx result;
8474
8475  if (GET_MODE (x) == mode)
8476    return x;
8477
8478  /* We can only support MODE being wider than a word if X is a
8479     constant integer or has a mode the same size.  */
8480
8481  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8482      && ! ((GET_MODE (x) == VOIDmode
8483	     && (GET_CODE (x) == CONST_INT
8484		 || GET_CODE (x) == CONST_DOUBLE))
8485	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8486    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8487
8488  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
8489     won't know what to do.  So we will strip off the SUBREG here and
8490     process normally.  */
8491  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8492    {
8493      x = SUBREG_REG (x);
8494      if (GET_MODE (x) == mode)
8495	return x;
8496    }
8497
8498  result = gen_lowpart_common (mode, x);
8499  if (result != 0
8500      && GET_CODE (result) == SUBREG
8501      && GET_CODE (SUBREG_REG (result)) == REG
8502      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8503      && (GET_MODE_SIZE (GET_MODE (result))
8504	  != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8505    reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
8506
8507  if (result)
8508    return result;
8509
8510  if (GET_CODE (x) == MEM)
8511    {
8512      register int offset = 0;
8513      rtx new;
8514
8515      /* Refuse to work on a volatile memory ref or one with a mode-dependent
8516	 address.  */
8517      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8518	return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8519
8520      /* If we want to refer to something bigger than the original memref,
8521	 generate a perverse subreg instead.  That will force a reload
8522	 of the original memref X.  */
8523      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8524	return gen_rtx (SUBREG, mode, x, 0);
8525
8526      if (WORDS_BIG_ENDIAN)
8527	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8528		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8529      if (BYTES_BIG_ENDIAN)
8530	{
8531	  /* Adjust the address so that the address-after-the-data is
8532	     unchanged.  */
8533	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8534		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8535	}
8536      new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8537      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8538      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8539      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8540      return new;
8541    }
8542
8543  /* If X is a comparison operator, rewrite it in a new mode.  This
8544     probably won't match, but may allow further simplifications.  */
8545  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8546    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8547
8548  /* If we couldn't simplify X any other way, just enclose it in a
8549     SUBREG.  Normally, this SUBREG won't match, but some patterns may
8550     include an explicit SUBREG or we may simplify it further in combine.  */
8551  else
8552    {
8553      int word = 0;
8554
8555      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8556	word = ((GET_MODE_SIZE (GET_MODE (x))
8557		 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8558		/ UNITS_PER_WORD);
8559      return gen_rtx (SUBREG, mode, x, word);
8560    }
8561}
8562
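/* A sketch of the byte offset the MEM case above computes, with byte
   sizes standing in for modes, word and byte endianness assumed equal,
   and MAX/MIN as used elsewhere in this file; the name is illustrative.
   On a big-endian target the narrow value sits at the high-address end
   of the wide one, keeping the address just past the data unchanged.  */

static int
sketch_lowpart_offset (wide_size, narrow_size, units_per_word, big_endian)
     int wide_size, narrow_size, units_per_word, big_endian;
{
  int offset = 0;

  if (big_endian)		/* the WORDS_BIG_ENDIAN adjustment */
    offset = (MAX (wide_size, units_per_word)
	      - MAX (narrow_size, units_per_word));
  if (big_endian)		/* the BYTES_BIG_ENDIAN adjustment */
    offset -= (MIN (units_per_word, narrow_size)
	       - MIN (units_per_word, wide_size));

  /* E.g. an 8-byte value, 4-byte words, 1-byte lowpart: 4 - (1 - 4) == 7,
     the last byte of the wide value.  */
  return offset;
}
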
8563/* Make an rtx expression.  This is a subset of gen_rtx and only supports
8564   expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8565
8566   If the identical expression was previously in the insn (in the undobuf),
8567   it will be returned.  Only if it is not found will a new expression
8568   be made.  */
8569
8570/*VARARGS2*/
8571static rtx
8572gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
8573{
8574#ifndef __STDC__
8575  enum rtx_code code;
8576  enum machine_mode mode;
8577#endif
8578  va_list p;
8579  int n_args;
8580  rtx args[3];
8581  int i, j;
8582  char *fmt;
8583  rtx rt;
8584
8585  VA_START (p, mode);
8586
8587#ifndef __STDC__
8588  code = va_arg (p, enum rtx_code);
8589  mode = va_arg (p, enum machine_mode);
8590#endif
8591
8592  n_args = GET_RTX_LENGTH (code);
8593  fmt = GET_RTX_FORMAT (code);
8594
8595  if (n_args == 0 || n_args > 3)
8596    abort ();
8597
8598  /* Get each arg and verify that it is supposed to be an expression.  */
8599  for (j = 0; j < n_args; j++)
8600    {
8601      if (*fmt++ != 'e')
8602	abort ();
8603
8604      args[j] = va_arg (p, rtx);
8605    }
8606
8607  /* See if this is in undobuf.  Be sure we don't use objects that came
8608     from another insn; this could produce circular rtl structures.  */
8609
8610  for (i = previous_num_undos; i < undobuf.num_undo; i++)
8611    if (!undobuf.undo[i].is_int
8612	&& GET_CODE (undobuf.undo[i].old_contents.r) == code
8613	&& GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8614      {
8615	for (j = 0; j < n_args; j++)
8616	  if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8617	    break;
8618
8619	if (j == n_args)
8620	  return undobuf.undo[i].old_contents.r;
8621      }
8622
8623  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
8624     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
8625  rt = rtx_alloc (code);
8626  PUT_MODE (rt, mode);
8627  XEXP (rt, 0) = args[0];
8628  if (n_args > 1)
8629    {
8630      XEXP (rt, 1) = args[1];
8631      if (n_args > 2)
8632	XEXP (rt, 2) = args[2];
8633    }
8634  return rt;
8635}
8636
8637/* These routines make binary and unary operations by first seeing if they
8638   fold; if not, a new expression is allocated.  */
8639
8640static rtx
8641gen_binary (code, mode, op0, op1)
8642     enum rtx_code code;
8643     enum machine_mode mode;
8644     rtx op0, op1;
8645{
8646  rtx result;
8647  rtx tem;
8648
8649  if (GET_RTX_CLASS (code) == 'c'
8650      && (GET_CODE (op0) == CONST_INT
8651	  || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8652    tem = op0, op0 = op1, op1 = tem;
8653
8654  if (GET_RTX_CLASS (code) == '<')
8655    {
8656      enum machine_mode op_mode = GET_MODE (op0);
8657
8658      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8659	 just (REL_OP X Y). */
8660      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8661	{
8662	  op1 = XEXP (op0, 1);
8663	  op0 = XEXP (op0, 0);
8664	  op_mode = GET_MODE (op0);
8665	}
8666
8667      if (op_mode == VOIDmode)
8668	op_mode = GET_MODE (op1);
8669      result = simplify_relational_operation (code, op_mode, op0, op1);
8670    }
8671  else
8672    result = simplify_binary_operation (code, mode, op0, op1);
8673
8674  if (result)
8675    return result;
8676
8677  /* Put complex operands first and constants second.  */
8678  if (GET_RTX_CLASS (code) == 'c'
8679      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8680	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8681	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8682	  || (GET_CODE (op0) == SUBREG
8683	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8684	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8685    return gen_rtx_combine (code, mode, op1, op0);
8686
8687  return gen_rtx_combine (code, mode, op0, op1);
8688}
8689
8690static rtx
8691gen_unary (code, mode, op0_mode, op0)
8692     enum rtx_code code;
8693     enum machine_mode mode, op0_mode;
8694     rtx op0;
8695{
8696  rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
8697
8698  if (result)
8699    return result;
8700
8701  return gen_rtx_combine (code, mode, op0);
8702}
8703
8704/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8705   comparison code that will be tested.
8706
8707   The result is a possibly different comparison code to use.  *POP0 and
8708   *POP1 may be updated.
8709
8710   It is possible that we might detect that a comparison is either always
8711   true or always false.  However, we do not perform general constant
8712   folding in combine, so this knowledge isn't useful.  Such tautologies
8713   should have been detected earlier.  Hence we ignore all such cases.  */
8714
8715static enum rtx_code
8716simplify_comparison (code, pop0, pop1)
8717     enum rtx_code code;
8718     rtx *pop0;
8719     rtx *pop1;
8720{
8721  rtx op0 = *pop0;
8722  rtx op1 = *pop1;
8723  rtx tem, tem1;
8724  int i;
8725  enum machine_mode mode, tmode;
8726
8727  /* Try a few ways of applying the same transformation to both operands.  */
8728  while (1)
8729    {
8730#ifndef WORD_REGISTER_OPERATIONS
8731      /* The test below this one won't handle SIGN_EXTENDs on these machines,
8732	 so check specially.  */
8733      if (code != GTU && code != GEU && code != LTU && code != LEU
8734	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8735	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
8736	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
8737	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8738	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8739	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
8740	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
8741	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
8742	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
8743	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8744	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8745	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8746	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8747	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8748	  && (INTVAL (XEXP (op0, 1))
8749	      == (GET_MODE_BITSIZE (GET_MODE (op0))
8750		  - (GET_MODE_BITSIZE
8751		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8752	{
8753	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8754	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8755	}
8756#endif
8757
8758      /* If both operands are the same constant shift, see if we can ignore the
8759	 shift.  We can if the shift is a rotate or if the bits shifted out of
8760	 this shift are known to be zero for both inputs and if the type of
8761	 comparison is compatible with the shift.  */
8762      if (GET_CODE (op0) == GET_CODE (op1)
8763	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8764	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8765	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
8766		  && (code != GT && code != LT && code != GE && code != LE))
8767	      || (GET_CODE (op0) == ASHIFTRT
8768		  && (code != GTU && code != LTU
8769		      && code != GEU && code != GEU)))
8770	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
8771	  && INTVAL (XEXP (op0, 1)) >= 0
8772	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8773	  && XEXP (op0, 1) == XEXP (op1, 1))
8774	{
8775	  enum machine_mode mode = GET_MODE (op0);
8776	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8777	  int shift_count = INTVAL (XEXP (op0, 1));
8778
8779	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8780	    mask &= (mask >> shift_count) << shift_count;
8781	  else if (GET_CODE (op0) == ASHIFT)
8782	    mask = (mask & (mask << shift_count)) >> shift_count;
8783
8784	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8785	      && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8786	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8787	  else
8788	    break;
8789	}
8790
8791      /* If both operands are AND's of a paradoxical SUBREG by constant, the
8792	 SUBREGs are of the same mode, and, in both cases, the AND would
8793	 be redundant if the comparison was done in the narrower mode,
8794	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8795	 and the operand's possibly nonzero bits are 0xffffff01; in that case
8796	 if we only care about QImode, we don't need the AND).  This case
8797	 occurs if the output mode of an scc insn is not SImode and
8798	 STORE_FLAG_VALUE == 1 (e.g., the 386).
8799
8800	 Similarly, check for a case where the AND's are ZERO_EXTEND
8801	 operations from some narrower mode even though a SUBREG is not
8802	 present.  */
8803
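      /* Illustrative sketch of the first case below: comparing
	 (and (subreg:SI (reg:QI 70) 0) 1) with (and (subreg:SI (reg:QI 71) 0) 1)
	 can become an unsigned QImode comparison of (reg:QI 70) with
	 (reg:QI 71) when bit 0 is the only bit of each register that can
	 be nonzero; registers 70 and 71 are hypothetical.  */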
8804      else if  (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8805		&& GET_CODE (XEXP (op0, 1)) == CONST_INT
8806		&& GET_CODE (XEXP (op1, 1)) == CONST_INT)
8807	{
8808	  rtx inner_op0 = XEXP (op0, 0);
8809	  rtx inner_op1 = XEXP (op1, 0);
8810	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8811	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8812	  int changed = 0;
8813
8814	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8815	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
8816		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8817	      && (GET_MODE (SUBREG_REG (inner_op0))
8818		  == GET_MODE (SUBREG_REG (inner_op1)))
8819	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8820		  <= HOST_BITS_PER_WIDE_INT)
8821	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8822					      GET_MODE (SUBREG_REG (op0)))))
8823	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8824					      GET_MODE (SUBREG_REG (inner_op1))))))
8825	    {
8826	      op0 = SUBREG_REG (inner_op0);
8827	      op1 = SUBREG_REG (inner_op1);
8828
8829	      /* The resulting comparison is always unsigned since we masked
8830		 off the original sign bit. */
8831	      code = unsigned_condition (code);
8832
8833	      changed = 1;
8834	    }
8835
8836	  else if (c0 == c1)
8837	    for (tmode = GET_CLASS_NARROWEST_MODE
8838		 (GET_MODE_CLASS (GET_MODE (op0)));
8839		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8840	      if (c0 == GET_MODE_MASK (tmode))
8841		{
8842		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
8843		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
8844		  code = unsigned_condition (code);
8845		  changed = 1;
8846		  break;
8847		}
8848
8849	  if (! changed)
8850	    break;
8851	}
8852
8853      /* If both operands are NOT, we can strip off the outer operation
8854	 and adjust the comparison code for swapped operands; similarly for
8855	 NEG, except that this must be an equality comparison.  */
8856      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8857	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8858		   && (code == EQ || code == NE)))
8859	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
8860
8861      else
8862	break;
8863    }
8864
8865  /* If the first operand is a constant, swap the operands and adjust the
8866     comparison code appropriately.  */
8867  if (CONSTANT_P (op0))
8868    {
8869      tem = op0, op0 = op1, op1 = tem;
8870      code = swap_condition (code);
8871    }
8872
8873  /* We now enter a loop during which we will try to simplify the comparison.
8874     For the most part, we are only concerned with comparisons against
8875     zero, but some comparisons against zero do not start out looking
8876     that way.  */
8877
8878  while (GET_CODE (op1) == CONST_INT)
8879    {
8880      enum machine_mode mode = GET_MODE (op0);
8881      int mode_width = GET_MODE_BITSIZE (mode);
8882      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8883      int equality_comparison_p;
8884      int sign_bit_comparison_p;
8885      int unsigned_comparison_p;
8886      HOST_WIDE_INT const_op;
8887
8888      /* We only want to handle integral modes.  This catches VOIDmode,
8889	 CCmode, and the floating-point modes.  An exception is that we
8890	 can handle VOIDmode if OP0 is a COMPARE or a comparison
8891	 operation.  */
8892
8893      if (GET_MODE_CLASS (mode) != MODE_INT
8894	  && ! (mode == VOIDmode
8895		&& (GET_CODE (op0) == COMPARE
8896		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8897	break;
8898
8899      /* Get the constant we are comparing against and turn off all bits
8900	 not on in our mode.  */
8901      const_op = INTVAL (op1);
8902      if (mode_width <= HOST_BITS_PER_WIDE_INT)
8903	const_op &= mask;
8904
8905      /* If we are comparing against a constant power of two and the value
8906	 being compared can only have that single bit nonzero (e.g., it was
8907	 `and'ed with that bit), we can replace this with a comparison
8908	 with zero.  */
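      /* For example, if OP0 is (and X 4), so that only bit 2 can be nonzero,
	 then (eq (and X 4) 4) becomes (ne (and X 4) 0) and (ne (and X 4) 4)
	 becomes (eq (and X 4) 0).  */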
8909      if (const_op
8910	  && (code == EQ || code == NE || code == GE || code == GEU
8911	      || code == LT || code == LTU)
8912	  && mode_width <= HOST_BITS_PER_WIDE_INT
8913	  && exact_log2 (const_op) >= 0
8914	  && nonzero_bits (op0, mode) == const_op)
8915	{
8916	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8917	  op1 = const0_rtx, const_op = 0;
8918	}
8919
8920      /* Similarly, if we are comparing a value known to be either -1 or
8921	 0 with -1, change it to the opposite comparison against zero.  */
8922
8923      if (const_op == -1
8924	  && (code == EQ || code == NE || code == GT || code == LE
8925	      || code == GEU || code == LTU)
8926	  && num_sign_bit_copies (op0, mode) == mode_width)
8927	{
8928	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8929	  op1 = const0_rtx, const_op = 0;
8930	}
8931
8932      /* Do some canonicalizations based on the comparison code.  We prefer
8933	 comparisons against zero and then prefer equality comparisons.
8934	 If we can reduce the size of a constant, we will do that too.  */
8935
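      /* For example, (lt X 5) becomes (le X 4) and (geu X 2) becomes
	 (gtu X 1), while with 32-bit operands (ltu X 0x80000000) becomes
	 the sign-bit test (ge X 0).  */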
8936      switch (code)
8937	{
8938	case LT:
8939	  /* < C is equivalent to <= (C - 1) */
8940	  if (const_op > 0)
8941	    {
8942	      const_op -= 1;
8943	      op1 = GEN_INT (const_op);
8944	      code = LE;
8945	      /* ... fall through to LE case below.  */
8946	    }
8947	  else
8948	    break;
8949
8950	case LE:
8951	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
8952	  if (const_op < 0)
8953	    {
8954	      const_op += 1;
8955	      op1 = GEN_INT (const_op);
8956	      code = LT;
8957	    }
8958
8959	  /* If we are doing a <= 0 comparison on a value known to have
8960	     a zero sign bit, we can replace this with == 0.  */
8961	  else if (const_op == 0
8962		   && mode_width <= HOST_BITS_PER_WIDE_INT
8963		   && (nonzero_bits (op0, mode)
8964		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8965	    code = EQ;
8966	  break;
8967
8968	case GE:
8969	  /* >= C is equivalent to > (C - 1). */
8970	  if (const_op > 0)
8971	    {
8972	      const_op -= 1;
8973	      op1 = GEN_INT (const_op);
8974	      code = GT;
8975	      /* ... fall through to GT below.  */
8976	    }
8977	  else
8978	    break;
8979
8980	case GT:
8981	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
8982	  if (const_op < 0)
8983	    {
8984	      const_op += 1;
8985	      op1 = GEN_INT (const_op);
8986	      code = GE;
8987	    }
8988
8989	  /* If we are doing a > 0 comparison on a value known to have
8990	     a zero sign bit, we can replace this with != 0.  */
8991	  else if (const_op == 0
8992		   && mode_width <= HOST_BITS_PER_WIDE_INT
8993		   && (nonzero_bits (op0, mode)
8994		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8995	    code = NE;
8996	  break;
8997
8998	case LTU:
8999	  /* < C is equivalent to <= (C - 1).  */
9000	  if (const_op > 0)
9001	    {
9002	      const_op -= 1;
9003	      op1 = GEN_INT (const_op);
9004	      code = LEU;
9005	      /* ... fall through ... */
9006	    }
9007
9008	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
9009	  else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9010	    {
9011	      const_op = 0, op1 = const0_rtx;
9012	      code = GE;
9013	      break;
9014	    }
9015	  else
9016	    break;
9017
9018	case LEU:
9019	  /* unsigned <= 0 is equivalent to == 0 */
9020	  if (const_op == 0)
9021	    code = EQ;
9022
9023	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9024	  else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9025	    {
9026	      const_op = 0, op1 = const0_rtx;
9027	      code = GE;
9028	    }
9029	  break;
9030
9031	case GEU:
9032	  /* >= C is equivalent to > (C - 1).  */
9033	  if (const_op > 1)
9034	    {
9035	      const_op -= 1;
9036	      op1 = GEN_INT (const_op);
9037	      code = GTU;
9038	      /* ... fall through ... */
9039	    }
9040
9041	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
9042	  else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9043	    {
9044	      const_op = 0, op1 = const0_rtx;
9045	      code = LT;
9046	      break;
9047	    }
9048	  else
9049	    break;
9050
9051	case GTU:
9052	  /* unsigned > 0 is equivalent to != 0 */
9053	  if (const_op == 0)
9054	    code = NE;
9055
9056	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
9057	  else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9058	    {
9059	      const_op = 0, op1 = const0_rtx;
9060	      code = LT;
9061	    }
9062	  break;
9063	}
9064
9065      /* Compute some predicates to simplify code below.  */
9066
9067      equality_comparison_p = (code == EQ || code == NE);
9068      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9069      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9070			       || code == GEU);
9071
9072      /* If this is a sign bit comparison and we can do arithmetic in
9073	 MODE, say that we will only be needing the sign bit of OP0.  */
9074      if (sign_bit_comparison_p
9075	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9076	op0 = force_to_mode (op0, mode,
9077			     ((HOST_WIDE_INT) 1
9078			      << (GET_MODE_BITSIZE (mode) - 1)),
9079			     NULL_RTX, 0);
9080
9081      /* Now try cases based on the opcode of OP0.  If none of the cases
9082	 does a "continue", we exit this loop immediately after the
9083	 switch.  */
9084
9085      switch (GET_CODE (op0))
9086	{
9087	case ZERO_EXTRACT:
9088	  /* If we are extracting a single bit from a variable position in
9089	     a constant that has only a single bit set and are comparing it
9090	     with zero, we can convert this into an equality comparison
9091	     between the position and the location of the single bit.  We can't
9092	     do this if bits are big-endian and we don't have an extzv, since
9093	     we then can't know what mode to use for the endianness adjustment.  */
9094
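	  /* Illustrative sketch, assuming little-endian bit numbering:
	     (eq (zero_extract (const_int 4) (const_int 1) POS) 0) extracts
	     bit POS of the constant 4, whose only nonzero bit is bit 2,
	     so it becomes (ne POS 2).  */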
9095	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9096	      && XEXP (op0, 1) == const1_rtx
9097	      && equality_comparison_p && const_op == 0
9098	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0
9099	      && (! BITS_BIG_ENDIAN
9100#ifdef HAVE_extzv
9101		  || HAVE_extzv
9102#endif
9103		  ))
9104	    {
9105#ifdef HAVE_extzv
9106	      if (BITS_BIG_ENDIAN)
9107		i = (GET_MODE_BITSIZE
9108		     (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9109#endif
9110
9111	      op0 = XEXP (op0, 2);
9112	      op1 = GEN_INT (i);
9113	      const_op = i;
9114
9115	      /* Result is nonzero iff shift count is equal to I.  */
9116	      code = reverse_condition (code);
9117	      continue;
9118	    }
9119
9120	  /* ... fall through ... */
9121
9122	case SIGN_EXTRACT:
9123	  tem = expand_compound_operation (op0);
9124	  if (tem != op0)
9125	    {
9126	      op0 = tem;
9127	      continue;
9128	    }
9129	  break;
9130
9131	case NOT:
9132	  /* If testing for equality, we can take the NOT of the constant.  */
9133	  if (equality_comparison_p
9134	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9135	    {
9136	      op0 = XEXP (op0, 0);
9137	      op1 = tem;
9138	      continue;
9139	    }
9140
9141	  /* If just looking at the sign bit, reverse the sense of the
9142	     comparison.  */
9143	  if (sign_bit_comparison_p)
9144	    {
9145	      op0 = XEXP (op0, 0);
9146	      code = (code == GE ? LT : GE);
9147	      continue;
9148	    }
9149	  break;
9150
9151	case NEG:
9152	  /* If testing for equality, we can take the NEG of the constant.  */
9153	  if (equality_comparison_p
9154	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9155	    {
9156	      op0 = XEXP (op0, 0);
9157	      op1 = tem;
9158	      continue;
9159	    }
9160
9161	  /* The remaining cases only apply to comparisons with zero.  */
9162	  if (const_op != 0)
9163	    break;
9164
9165	  /* When X is ABS or is known positive,
9166	     (neg X) is < 0 if and only if X != 0.  */
9167
9168	  if (sign_bit_comparison_p
9169	      && (GET_CODE (XEXP (op0, 0)) == ABS
9170		  || (mode_width <= HOST_BITS_PER_WIDE_INT
9171		      && (nonzero_bits (XEXP (op0, 0), mode)
9172			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9173	    {
9174	      op0 = XEXP (op0, 0);
9175	      code = (code == LT ? NE : EQ);
9176	      continue;
9177	    }
9178
9179	  /* If we have NEG of something whose two high-order bits are the
9180	     same, we know that "(-a) < 0" is equivalent to "a > 0". */
9181	  if (num_sign_bit_copies (op0, mode) >= 2)
9182	    {
9183	      op0 = XEXP (op0, 0);
9184	      code = swap_condition (code);
9185	      continue;
9186	    }
9187	  break;
9188
9189	case ROTATE:
9190	  /* If we are testing equality and our count is a constant, we
9191	     can perform the inverse operation on our RHS.  */
9192	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9193	      && (tem = simplify_binary_operation (ROTATERT, mode,
9194						   op1, XEXP (op0, 1))) != 0)
9195	    {
9196	      op0 = XEXP (op0, 0);
9197	      op1 = tem;
9198	      continue;
9199	    }
9200
9201	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9202	     a particular bit.  Convert it to an AND of a constant of that
9203	     bit.  This will be converted into a ZERO_EXTRACT.  */
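	  /* For instance, with a 32-bit mode, (lt (rotate X 3) 0) tests
	     bit 28 of X, so we build (ne (and X (1 << 28)) 0) here.  */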
9204	  if (const_op == 0 && sign_bit_comparison_p
9205	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9206	      && mode_width <= HOST_BITS_PER_WIDE_INT)
9207	    {
9208	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9209					    ((HOST_WIDE_INT) 1
9210					     << (mode_width - 1
9211						 - INTVAL (XEXP (op0, 1)))));
9212	      code = (code == LT ? NE : EQ);
9213	      continue;
9214	    }
9215
9216	  /* ... fall through ... */
9217
9218	case ABS:
9219	  /* ABS is ignorable inside an equality comparison with zero.  */
9220	  if (const_op == 0 && equality_comparison_p)
9221	    {
9222	      op0 = XEXP (op0, 0);
9223	      continue;
9224	    }
9225	  break;
9226
9227
9228	case SIGN_EXTEND:
9229	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
9230	     to (compare FOO CONST) if CONST fits in FOO's mode and we
9231	     are either testing inequality or have an unsigned comparison
9232	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
9233	  if (! unsigned_comparison_p
9234	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9235		  <= HOST_BITS_PER_WIDE_INT)
9236	      && ((unsigned HOST_WIDE_INT) const_op
9237		  < (((HOST_WIDE_INT) 1
9238		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9239	    {
9240	      op0 = XEXP (op0, 0);
9241	      continue;
9242	    }
9243	  break;
9244
9245	case SUBREG:
9246	  /* Check for the case where we are comparing A - C1 with C2,
9247	     both constants are smaller than 1/2 the maximum positive
9248	     value in MODE, and the comparison is equality or unsigned.
9249	     In that case, if A is either zero-extended to MODE or has
9250	     sufficient sign bits so that the high-order bit in MODE
9251	     is a copy of the sign in the inner mode, we can prove that it is
9252	     safe to do the operation in the wider mode.  This simplifies
9253	     many range checks.  */
9254
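	  /* A hypothetical example with QImode inside SImode: comparing
	     (subreg:QI (plus:SI A (const_int -5)) 0) against 10 can be done
	     as an SImode comparison of (plus:SI A (const_int -5)) with 10
	     when A is known to be zero-extended to SImode, since neither
	     constant is large enough for the subtraction to wrap within
	     the range the comparison checks.  */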
9255	  if (mode_width <= HOST_BITS_PER_WIDE_INT
9256	      && subreg_lowpart_p (op0)
9257	      && GET_CODE (SUBREG_REG (op0)) == PLUS
9258	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9259	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9260	      && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9261		  < GET_MODE_MASK (mode) / 2)
9262	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9263	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9264				      GET_MODE (SUBREG_REG (op0)))
9265			& ~ GET_MODE_MASK (mode))
9266		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9267					   GET_MODE (SUBREG_REG (op0)))
9268		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9269			 - GET_MODE_BITSIZE (mode)))))
9270	    {
9271	      op0 = SUBREG_REG (op0);
9272	      continue;
9273	    }
9274
9275	  /* If the inner mode is narrower and we are extracting the low part,
9276	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
9277	  if (subreg_lowpart_p (op0)
9278	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9279	    /* Fall through */ ;
9280	  else
9281	    break;
9282
9283	  /* ... fall through ... */
9284
9285	case ZERO_EXTEND:
9286	  if ((unsigned_comparison_p || equality_comparison_p)
9287	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9288		  <= HOST_BITS_PER_WIDE_INT)
9289	      && ((unsigned HOST_WIDE_INT) const_op
9290		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9291	    {
9292	      op0 = XEXP (op0, 0);
9293	      continue;
9294	    }
9295	  break;
9296
9297	case PLUS:
9298	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
9299	     this for equality comparisons due to pathological cases involving
9300	     overflows.  */
9301	  if (equality_comparison_p
9302	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
9303							op1, XEXP (op0, 1))))
9304	    {
9305	      op0 = XEXP (op0, 0);
9306	      op1 = tem;
9307	      continue;
9308	    }
9309
9310	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
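	  /* This holds because (abs X) - 1 is nonnegative for any nonzero X,
	     even for the most negative value, where the subtraction wraps
	     around to the largest positive value, while X == 0 yields -1.  */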
9311	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9312	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9313	    {
9314	      op0 = XEXP (XEXP (op0, 0), 0);
9315	      code = (code == LT ? EQ : NE);
9316	      continue;
9317	    }
9318	  break;
9319
9320	case MINUS:
9321	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
9322	     (eq B (minus A C)), whichever simplifies.  We can only do
9323	     this for equality comparisons due to pathological cases involving
9324	     overflows.  */
9325	  if (equality_comparison_p
9326	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
9327							XEXP (op0, 1), op1)))
9328	    {
9329	      op0 = XEXP (op0, 0);
9330	      op1 = tem;
9331	      continue;
9332	    }
9333
9334	  if (equality_comparison_p
9335	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
9336							XEXP (op0, 0), op1)))
9337	    {
9338	      op0 = XEXP (op0, 1);
9339	      op1 = tem;
9340	      continue;
9341	    }
9342
9343	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9344	     of bits in X minus 1, is one iff X > 0.  */
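	  /* To see this: (ashiftrt X (const_int (w-1))) is 0 when X >= 0 and
	     -1 when X < 0, so the MINUS is -X in the first case (negative
	     exactly when X > 0) and -1 - X in the second (never negative).  */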
9345	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9346	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9347	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9348	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9349	    {
9350	      op0 = XEXP (op0, 1);
9351	      code = (code == GE ? LE : GT);
9352	      continue;
9353	    }
9354	  break;
9355
9356	case XOR:
9357	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
9358	     if C is zero or B is a constant.  */
9359	  if (equality_comparison_p
9360	      && 0 != (tem = simplify_binary_operation (XOR, mode,
9361							XEXP (op0, 1), op1)))
9362	    {
9363	      op0 = XEXP (op0, 0);
9364	      op1 = tem;
9365	      continue;
9366	    }
9367	  break;
9368
9369	case EQ:  case NE:
9370	case LT:  case LTU:  case LE:  case LEU:
9371	case GT:  case GTU:  case GE:  case GEU:
9372	  /* We can't do anything if OP0 is a condition code value, rather
9373	     than an actual data value.  */
9374	  if (const_op != 0
9375#ifdef HAVE_cc0
9376	      || XEXP (op0, 0) == cc0_rtx
9377#endif
9378	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9379	    break;
9380
9381	  /* Get the two operands being compared.  */
9382	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9383	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9384	  else
9385	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9386
9387	  /* Check for the cases where we simply want the result of the
9388	     earlier test or the opposite of that result.  */
9389	  if (code == NE
9390	      || (code == EQ && reversible_comparison_p (op0))
9391	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9392		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9393		  && (STORE_FLAG_VALUE
9394		      & (((HOST_WIDE_INT) 1
9395			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9396		  && (code == LT
9397		      || (code == GE && reversible_comparison_p (op0)))))
9398	    {
9399	      code = (code == LT || code == NE
9400		      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9401	      op0 = tem, op1 = tem1;
9402	      continue;
9403	    }
9404	  break;
9405
9406	case IOR:
9407	  /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9408	     iff X <= 0.  */
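	  /* To see this: when X > 0, both X and X - 1 are nonnegative, so
	     the IOR has a clear sign bit; when X <= 0, either X itself is
	     negative or X - 1 wraps around to -1, setting the sign bit.  */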
9409	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9410	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9411	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9412	    {
9413	      op0 = XEXP (op0, 1);
9414	      code = (code == GE ? GT : LE);
9415	      continue;
9416	    }
9417	  break;
9418
9419	case AND:
9420	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
9421	     will be converted to a ZERO_EXTRACT later.  */
9422	  if (const_op == 0 && equality_comparison_p
9423	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
9424	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9425	    {
9426	      op0 = simplify_and_const_int
9427		(op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9428					     XEXP (op0, 1),
9429					     XEXP (XEXP (op0, 0), 1)),
9430		 (HOST_WIDE_INT) 1);
9431	      continue;
9432	    }
9433
9434	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9435	     zero and X is a comparison and C1 and C2 describe only bits set
9436	     in STORE_FLAG_VALUE, we can compare with X.  */
9437	  if (const_op == 0 && equality_comparison_p
9438	      && mode_width <= HOST_BITS_PER_WIDE_INT
9439	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9440	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9441	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9442	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
9443	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9444	    {
9445	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9446		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
9447	      if ((~ STORE_FLAG_VALUE & mask) == 0
9448		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9449		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9450			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9451		{
9452		  op0 = XEXP (XEXP (op0, 0), 0);
9453		  continue;
9454		}
9455	    }
9456
9457	  /* If we are doing an equality comparison of an AND of a bit equal
9458	     to the sign bit, replace this with a LT or GE comparison of
9459	     the underlying value.  */
9460	  if (equality_comparison_p
9461	      && const_op == 0
9462	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9463	      && mode_width <= HOST_BITS_PER_WIDE_INT
9464	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9465		  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9466	    {
9467	      op0 = XEXP (op0, 0);
9468	      code = (code == EQ ? GE : LT);
9469	      continue;
9470	    }
9471
9472	  /* If this AND operation is really a ZERO_EXTEND from a narrower
9473	     mode, the constant fits within that mode, and this is either an
9474	     equality or unsigned comparison, try to do this comparison in
9475	     the narrower mode.  */
9476	  if ((equality_comparison_p || unsigned_comparison_p)
9477	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9478	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9479				   & GET_MODE_MASK (mode))
9480				  + 1)) >= 0
9481	      && const_op >> i == 0
9482	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9483	    {
9484	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9485	      continue;
9486	    }
9487	  break;
9488
9489	case ASHIFT:
9490	  /* If we have (compare (ashift FOO N) (const_int C)) and
9491	     the high order N bits of FOO (N+1 if an inequality comparison)
9492	     are known to be zero, we can do this by comparing FOO with C
9493	     shifted right N bits so long as the low-order N bits of C are
9494	     zero.  */
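	  /* For example, with 32-bit operands, (eq (ashift X 2) 12) can
	     become (eq X 3) when the two high-order bits of X are known to
	     be zero, since then the shift discards no information.  */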
9495	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9496	      && INTVAL (XEXP (op0, 1)) >= 0
9497	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
9498		  < HOST_BITS_PER_WIDE_INT)
9499	      && ((const_op
9500		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
9501	      && mode_width <= HOST_BITS_PER_WIDE_INT
9502	      && (nonzero_bits (XEXP (op0, 0), mode)
9503		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
9504				+ ! equality_comparison_p))) == 0)
9505	    {
9506	      const_op >>= INTVAL (XEXP (op0, 1));
9507	      op1 = GEN_INT (const_op);
9508	      op0 = XEXP (op0, 0);
9509	      continue;
9510	    }
9511
9512	  /* If we are doing a sign bit comparison, it means we are testing
9513	     a particular bit.  Convert it to the appropriate AND.  */
9514	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9515	      && mode_width <= HOST_BITS_PER_WIDE_INT)
9516	    {
9517	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9518					    ((HOST_WIDE_INT) 1
9519					     << (mode_width - 1
9520						 - INTVAL (XEXP (op0, 1)))));
9521	      code = (code == LT ? NE : EQ);
9522	      continue;
9523	    }
9524
9525	  /* If this is an equality comparison with zero and we are shifting
9526	     the low bit to the sign bit, we can convert this to an AND of the
9527	     low-order bit.  */
9528	  if (const_op == 0 && equality_comparison_p
9529	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9530	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9531	    {
9532	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9533					    (HOST_WIDE_INT) 1);
9534	      continue;
9535	    }
9536	  break;
9537
9538	case ASHIFTRT:
9539	  /* If this is an equality comparison with zero, we can do this
9540	     as a logical shift, which might be much simpler.  */
9541	  if (equality_comparison_p && const_op == 0
9542	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9543	    {
9544	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9545					  XEXP (op0, 0),
9546					  INTVAL (XEXP (op0, 1)));
9547	      continue;
9548	    }
9549
9550	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9551	     do the comparison in a narrower mode.  */
9552	  if (! unsigned_comparison_p
9553	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9554	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
9555	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9556	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
9557					 MODE_INT, 1)) != BLKmode
9558	      && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9559		  || ((unsigned HOST_WIDE_INT) - const_op
9560		      <= GET_MODE_MASK (tmode))))
9561	    {
9562	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9563	      continue;
9564	    }
9565
9566	  /* ... fall through ... */
9567	case LSHIFTRT:
9568	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
9569	     the low order N bits of FOO are known to be zero, we can do this
9570	     by comparing FOO with C shifted left N bits so long as no
9571	     overflow occurs.  */
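	  /* For example, (eq (lshiftrt X 2) 3) can become (eq X 12) when
	     the two low-order bits of X are known to be zero, since
	     shifting 3 back left by 2 bits cannot overflow.  */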
9572	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9573	      && INTVAL (XEXP (op0, 1)) >= 0
9574	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9575	      && mode_width <= HOST_BITS_PER_WIDE_INT
9576	      && (nonzero_bits (XEXP (op0, 0), mode)
9577		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9578	      && (const_op == 0
9579		  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9580		      < mode_width)))
9581	    {
9582	      const_op <<= INTVAL (XEXP (op0, 1));
9583	      op1 = GEN_INT (const_op);
9584	      op0 = XEXP (op0, 0);
9585	      continue;
9586	    }
9587
9588	  /* If we are using this shift to extract just the sign bit, we
9589	     can replace this with an LT or GE comparison.  */
9590	  if (const_op == 0
9591	      && (equality_comparison_p || sign_bit_comparison_p)
9592	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9593	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9594	    {
9595	      op0 = XEXP (op0, 0);
9596	      code = (code == NE || code == GT ? LT : GE);
9597	      continue;
9598	    }
9599	  break;
9600	}
9601
9602      break;
9603    }
9604
9605  /* Now make any compound operations involved in this comparison.  Then,
9606     check for an outmost SUBREG on OP0 that isn't doing anything or is
9607     paradoxical.  The latter case can only occur when it is known that the
9608     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
9609     We can never remove a SUBREG for a non-equality comparison because the
9610     sign bit is in a different place in the underlying object.  */
9611
9612  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9613  op1 = make_compound_operation (op1, SET);
9614
9615  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9616      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9617      && (code == NE || code == EQ)
9618      && ((GET_MODE_SIZE (GET_MODE (op0))
9619	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9620    {
9621      op0 = SUBREG_REG (op0);
9622      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9623    }
9624
9625  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9626	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9627	   && (code == NE || code == EQ)
9628	   && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9629	       <= HOST_BITS_PER_WIDE_INT)
9630	   && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9631	       & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9632	   && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9633					      op1),
9634	       (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9635		& ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9636    op0 = SUBREG_REG (op0), op1 = tem;
9637
9638  /* We now do the opposite procedure: Some machines don't have compare
9639     insns in all modes.  If OP0's mode is an integer mode smaller than a
9640     word and we can't do a compare in that mode, see if there is a larger
9641     mode for which we can do the compare.  There are a number of cases in
9642     which we can use the wider mode.  */
9643
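  /* For example, on a hypothetical target with no QImode compare insn, an
     equality test of two QImode values whose SImode nonzero bits all lie
     within QImode could be done as an SImode comparison of their lowparts.  */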
9644  mode = GET_MODE (op0);
9645  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9646      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9647      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9648    for (tmode = GET_MODE_WIDER_MODE (mode);
9649	 (tmode != VOIDmode
9650	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9651	 tmode = GET_MODE_WIDER_MODE (tmode))
9652      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9653	{
9654	  /* If the only nonzero bits in OP0 and OP1 are those in the
9655	     narrower mode and this is an equality or unsigned comparison,
9656	     we can use the wider mode.  Similarly for sign-extended
9657	     values, in which case it is true for all comparisons.  */
9658	  if (((code == EQ || code == NE
9659		|| code == GEU || code == GTU || code == LEU || code == LTU)
9660	       && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9661	       && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9662	      || ((num_sign_bit_copies (op0, tmode)
9663		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9664		  && (num_sign_bit_copies (op1, tmode)
9665		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9666	    {
9667	      op0 = gen_lowpart_for_combine (tmode, op0);
9668	      op1 = gen_lowpart_for_combine (tmode, op1);
9669	      break;
9670	    }
9671
9672	  /* If this is a test for negative, we can make an explicit
9673	     test of the sign bit.  */
9674
9675	  if (op1 == const0_rtx && (code == LT || code == GE)
9676	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9677	    {
9678	      op0 = gen_binary (AND, tmode,
9679				gen_lowpart_for_combine (tmode, op0),
9680				GEN_INT ((HOST_WIDE_INT) 1
9681					 << (GET_MODE_BITSIZE (mode) - 1)));
9682	      code = (code == LT) ? NE : EQ;
9683	      break;
9684	    }
9685	}
9686
9687#ifdef CANONICALIZE_COMPARISON
9688  /* If this machine only supports a subset of valid comparisons, see if we
9689     can convert an unsupported one into a supported one.  */
9690  CANONICALIZE_COMPARISON (code, op0, op1);
9691#endif
9692
9693  *pop0 = op0;
9694  *pop1 = op1;
9695
9696  return code;
9697}
9698
9699/* Return 1 if we know that X, a comparison operation, is not operating
9700   on a floating-point value or is EQ or NE, meaning that we can safely
9701   reverse it.  */
9702
9703static int
9704reversible_comparison_p (x)
9705     rtx x;
9706{
9707  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9708      || flag_fast_math
9709      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9710    return 1;
9711
9712  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9713    {
9714    case MODE_INT:
9715    case MODE_PARTIAL_INT:
9716    case MODE_COMPLEX_INT:
9717      return 1;
9718
9719    case MODE_CC:
9720      /* If the mode of the condition codes tells us that this is safe,
9721	 we need look no further.  */
9722      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9723	return 1;
9724
9725      /* Otherwise try and find where the condition codes were last set and
9726	 use that.  */
9727      x = get_last_value (XEXP (x, 0));
9728      return (x && GET_CODE (x) == COMPARE
9729	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9730    }
9731
9732  return 0;
9733}
9734
9735/* Utility function for the following routine.  Called when X is part of a
9736   value being stored into reg_last_set_value.  Sets reg_last_set_table_tick
9737   for each register mentioned.  Similar to mention_regs in cse.c.  */
9738
9739static void
9740update_table_tick (x)
9741     rtx x;
9742{
9743  register enum rtx_code code = GET_CODE (x);
9744  register char *fmt = GET_RTX_FORMAT (code);
9745  register int i;
9746
9747  if (code == REG)
9748    {
9749      int regno = REGNO (x);
9750      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9751			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9752
9753      for (i = regno; i < endregno; i++)
9754	reg_last_set_table_tick[i] = label_tick;
9755
9756      return;
9757    }
9758
9759  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9760    /* Note that we can't have an "E" in values stored; see
9761       get_last_value_validate.  */
9762    if (fmt[i] == 'e')
9763      update_table_tick (XEXP (x, i));
9764}
9765
9766/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
9767   are saying that the register is clobbered and we no longer know its
9768   value.  If INSN is zero, don't update reg_last_set; this is only permitted
9769   with VALUE also zero and is used to invalidate the register.  */
9770
9771static void
9772record_value_for_reg (reg, insn, value)
9773     rtx reg;
9774     rtx insn;
9775     rtx value;
9776{
9777  int regno = REGNO (reg);
9778  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9779			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9780  int i;
9781
9782  /* If VALUE contains REG and we have a previous value for REG, substitute
9783     the previous value.  */
9784  if (value && insn && reg_overlap_mentioned_p (reg, value))
9785    {
9786      rtx tem;
9787
9788      /* Set things up so get_last_value is allowed to see anything set up to
9789	 our insn.  */
9790      subst_low_cuid = INSN_CUID (insn);
9791      tem = get_last_value (reg);
9792
9793      if (tem)
9794	value = replace_rtx (copy_rtx (value), reg, tem);
9795    }
9796
9797  /* For each register modified, show we don't know its value, that
9798     we don't know about its bitwise content, that its value has been
9799     updated, and that we don't know the location of the death of the
9800     register.  */
9801  for (i = regno; i < endregno; i++)
9802    {
9803      if (insn)
9804	reg_last_set[i] = insn;
9805      reg_last_set_value[i] = 0;
9806      reg_last_set_mode[i] = 0;
9807      reg_last_set_nonzero_bits[i] = 0;
9808      reg_last_set_sign_bit_copies[i] = 0;
9809      reg_last_death[i] = 0;
9810    }
9811
9812  /* Mark registers that are being referenced in this value.  */
9813  if (value)
9814    update_table_tick (value);
9815
9816  /* Now update the status of each register being set.
9817     If someone is using this register in this block, set this register
9818     to invalid since we will get confused between the two lives in this
9819     basic block.  This makes using this register always invalid.  In cse, we
9820     scan the table to invalidate all entries using this register, but this
9821     is too much work for us.  */
9822
9823  for (i = regno; i < endregno; i++)
9824    {
9825      reg_last_set_label[i] = label_tick;
9826      if (value && reg_last_set_table_tick[i] == label_tick)
9827	reg_last_set_invalid[i] = 1;
9828      else
9829	reg_last_set_invalid[i] = 0;
9830    }
9831
9832  /* The value being assigned might refer to X (like in "x++;").  In that
9833     case, we must replace it with (clobber (const_int 0)) to prevent
9834     infinite loops.  */
9835  if (value && ! get_last_value_validate (&value,
9836					  reg_last_set_label[regno], 0))
9837    {
9838      value = copy_rtx (value);
9839      if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9840	value = 0;
9841    }
9842
9843  /* For the main register being modified, update the value, the mode, the
9844     nonzero bits, and the number of sign bit copies.  */
9845
9846  reg_last_set_value[regno] = value;
9847
9848  if (value)
9849    {
9850      subst_low_cuid = INSN_CUID (insn);
9851      reg_last_set_mode[regno] = GET_MODE (reg);
9852      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9853      reg_last_set_sign_bit_copies[regno]
9854	= num_sign_bit_copies (value, GET_MODE (reg));
9855    }
9856}
9857
9858/* Used for communication between the following two routines.  */
9859static rtx record_dead_insn;
9860
9861/* Called via note_stores from record_dead_and_set_regs to handle one
9862   SET or CLOBBER in an insn.  */
9863
9864static void
9865record_dead_and_set_regs_1 (dest, setter)
9866     rtx dest, setter;
9867{
9868  if (GET_CODE (dest) == SUBREG)
9869    dest = SUBREG_REG (dest);
9870
9871  if (GET_CODE (dest) == REG)
9872    {
9873      /* If we are setting the whole register, we know its value.  Otherwise
9874	 show that we don't know the value.  We can handle SUBREG in
9875	 some cases.  */
9876      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9877	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9878      else if (GET_CODE (setter) == SET
9879	       && GET_CODE (SET_DEST (setter)) == SUBREG
9880	       && SUBREG_REG (SET_DEST (setter)) == dest
9881	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
9882	       && subreg_lowpart_p (SET_DEST (setter)))
9883	record_value_for_reg (dest, record_dead_insn,
9884			      gen_lowpart_for_combine (GET_MODE (dest),
9885						       SET_SRC (setter)));
9886      else
9887	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9888    }
9889  else if (GET_CODE (dest) == MEM
9890	   /* Ignore pushes, they clobber nothing.  */
9891	   /* Ignore pushes; they clobber nothing.  */
9892    mem_last_set = INSN_CUID (record_dead_insn);
9893}
9894
9895/* Update the records of when each REG was most recently set or killed
9896   for the things done by INSN.  This is the last thing done in processing
9897   INSN in the combiner loop.
9898
9899   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9900   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9901   and also the similar information mem_last_set (which insn most recently
9902   modified memory) and last_call_cuid (which insn was the most recent
9903   subroutine call).  */
9904
9905static void
9906record_dead_and_set_regs (insn)
9907     rtx insn;
9908{
9909  register rtx link;
9910  int i;
9911
9912  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9913    {
9914      if (REG_NOTE_KIND (link) == REG_DEAD
9915	  && GET_CODE (XEXP (link, 0)) == REG)
9916	{
9917	  int regno = REGNO (XEXP (link, 0));
9918	  int endregno
9919	    = regno + (regno < FIRST_PSEUDO_REGISTER
9920		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9921		       : 1);
9922
9923	  for (i = regno; i < endregno; i++)
9924	    reg_last_death[i] = insn;
9925	}
9926      else if (REG_NOTE_KIND (link) == REG_INC)
9927	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9928    }
9929
9930  if (GET_CODE (insn) == CALL_INSN)
9931    {
9932      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9933	if (call_used_regs[i])
9934	  {
9935	    reg_last_set_value[i] = 0;
9936	    reg_last_set_mode[i] = 0;
9937	    reg_last_set_nonzero_bits[i] = 0;
9938	    reg_last_set_sign_bit_copies[i] = 0;
9939	    reg_last_death[i] = 0;
9940	  }
9941
9942      last_call_cuid = mem_last_set = INSN_CUID (insn);
9943    }
9944
9945  record_dead_insn = insn;
9946  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9947}
9948
9949/* Utility routine for the following function.  Verify that all the registers
9950   mentioned in *LOC are valid when *LOC was part of a value set when
9951   label_tick == TICK.  Return 0 if some are not.
9952
9953   If REPLACE is non-zero, replace the invalid reference with
9954   (clobber (const_int 0)) and return 1.  This replacement is useful because
9955   we often can get useful information about the form of a value (e.g., if
9956   it was produced by a shift that always produces -1 or 0) even though
9957   we don't know exactly what registers it was produced from.  */
9958
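/* For example, if a recorded value was (ashiftrt (reg 65) 31) and (reg 65)
   has since become invalid, the value turns into
   (ashiftrt (clobber (const_int 0)) 31), which still shows that the result
   must be -1 or 0; register 65 is hypothetical.  */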
9959static int
9960get_last_value_validate (loc, tick, replace)
9961     rtx *loc;
9962     int tick;
9963     int replace;
9964{
9965  rtx x = *loc;
9966  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9967  int len = GET_RTX_LENGTH (GET_CODE (x));
9968  int i;
9969
9970  if (GET_CODE (x) == REG)
9971    {
9972      int regno = REGNO (x);
9973      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9974			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9975      int j;
9976
9977      for (j = regno; j < endregno; j++)
9978	if (reg_last_set_invalid[j]
9979	    /* If this is a pseudo-register that was only set once, it is
9980	       always valid.  */
9981	    || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9982		&& reg_last_set_label[j] > tick))
9983	  {
9984	    if (replace)
9985	      *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9986	    return replace;
9987	  }
9988
9989      return 1;
9990    }
9991
9992  for (i = 0; i < len; i++)
9993    if ((fmt[i] == 'e'
9994	 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9995	/* Don't bother with these.  They shouldn't occur anyway.  */
9996	|| fmt[i] == 'E')
9997      return 0;
9998
9999  /* If we haven't found a reason for it to be invalid, it is valid.  */
10000  return 1;
10001}
10002
10003/* Get the last value assigned to X, if known.  Some registers
10004   in the value may be replaced with (clobber (const_int 0)) if their value
10005   is no longer known reliably.  */
10006
10007static rtx
10008get_last_value (x)
10009     rtx x;
10010{
10011  int regno;
10012  rtx value;
10013
10014  /* If this is a non-paradoxical SUBREG, get the value of its operand and
10015     then convert it to the desired mode.  If this is a paradoxical SUBREG,
10016     we cannot predict what values the "extra" bits might have. */
10017  if (GET_CODE (x) == SUBREG
10018      && subreg_lowpart_p (x)
10019      && (GET_MODE_SIZE (GET_MODE (x))
10020	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10021      && (value = get_last_value (SUBREG_REG (x))) != 0)
10022    return gen_lowpart_for_combine (GET_MODE (x), value);
10023
10024  if (GET_CODE (x) != REG)
10025    return 0;
10026
10027  regno = REGNO (x);
10028  value = reg_last_set_value[regno];
10029
10030  /* If we don't have a value or if it isn't for this basic block, return 0. */
10031
10032  if (value == 0
10033      || (reg_n_sets[regno] != 1
10034	  && reg_last_set_label[regno] != label_tick))
10035    return 0;
10036
10037  /* If the value was set in a later insn than the ones we are processing,
10038     we can't use it even if the register was only set once, but make a quick
10039     check to see if the previous insn set it to something.  This is commonly
10040     the case when the same pseudo is used by repeated insns.
10041
10042     This does not work if there exists an instruction which is temporarily
10043     not on the insn chain.  */
10044
10045  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10046    {
10047      rtx insn, set;
10048
10049      /* We can not do anything useful in this case, because there is
10050	 an instruction which is not on the insn chain.  */
10051      if (subst_prev_insn)
10052	return 0;
10053
10054      /* Skip over USE insns.  They are not useful here, and they may have
10055	 been made by combine, in which case they do not have a INSN_CUID
10056	 been made by combine, in which case they do not have an INSN_CUID
10057	 take us backwards across labels.  Skip over BARRIERs also, since
10058	 they could have been made by combine.  If we see one, we must be
10059	 optimizing dead code, so it doesn't matter what we do.  */
10060      for (insn = prev_nonnote_insn (subst_insn);
10061	   insn && ((GET_CODE (insn) == INSN
10062		     && GET_CODE (PATTERN (insn)) == USE)
10063		    || GET_CODE (insn) == BARRIER
10064		    || INSN_CUID (insn) >= subst_low_cuid);
10065	   insn = prev_nonnote_insn (insn))
10066	;
10067
10068      if (insn
10069	  && (set = single_set (insn)) != 0
10070	  && rtx_equal_p (SET_DEST (set), x))
10071	{
10072	  value = SET_SRC (set);
10073
10074	  /* Make sure that VALUE doesn't reference X.  Replace any
10075	     explicit references with a CLOBBER.  If there are any remaining
10076	     references (rare), don't use the value.  */
10077
10078	  if (reg_mentioned_p (x, value))
10079	    value = replace_rtx (copy_rtx (value), x,
10080				 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
10081
10082	  if (reg_overlap_mentioned_p (x, value))
10083	    return 0;
10084	}
10085      else
10086	return 0;
10087    }
10088
10089  /* If the value has all its registers valid, return it.  */
10090  if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
10091    return value;
10092
10093  /* Otherwise, make a copy and replace any invalid register with
10094     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
10095
10096  value = copy_rtx (value);
10097  if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
10098    return value;
10099
10100  return 0;
10101}
10102
10103/* Return nonzero if expression X refers to a REG or to memory
10104   that is set in an instruction more recent than FROM_CUID.  */
10105
10106static int
10107use_crosses_set_p (x, from_cuid)
10108     register rtx x;
10109     int from_cuid;
10110{
10111  register char *fmt;
10112  register int i;
10113  register enum rtx_code code = GET_CODE (x);
10114
10115  if (code == REG)
10116    {
10117      register int regno = REGNO (x);
10118      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10119			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10120
10121#ifdef PUSH_ROUNDING
10122      /* Don't allow uses of the stack pointer to be moved,
10123	 because we don't know whether the move crosses a push insn.  */
10124      if (regno == STACK_POINTER_REGNUM)
10125	return 1;
10126#endif
10127      for (; regno < endreg; regno++)
10128	if (reg_last_set[regno]
10129	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
10130	  return 1;
10131      return 0;
10132    }
10133
10134  if (code == MEM && mem_last_set > from_cuid)
10135    return 1;
10136
10137  fmt = GET_RTX_FORMAT (code);
10138
10139  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10140    {
10141      if (fmt[i] == 'E')
10142	{
10143	  register int j;
10144	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10145	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10146	      return 1;
10147	}
10148      else if (fmt[i] == 'e'
10149	       && use_crosses_set_p (XEXP (x, i), from_cuid))
10150	return 1;
10151    }
10152  return 0;
10153}
10154
10155/* Define three variables used for communication between the following
10156   routines.  */
10157
10158static int reg_dead_regno, reg_dead_endregno;
10159static int reg_dead_flag;
10160
10161/* Function called via note_stores from reg_dead_at_p.
10162
10163   If DEST is within [reg_dead_regno, reg_dead_endregno), set
10164   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
10165
10166static void
10167reg_dead_at_p_1 (dest, x)
10168     rtx dest;
10169     rtx x;
10170{
10171  int regno, endregno;
10172
10173  if (GET_CODE (dest) != REG)
10174    return;
10175
10176  regno = REGNO (dest);
10177  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10178		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10179
10180  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10181    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10182}
10183
10184/* Return non-zero if REG is known to be dead at INSN.
10185
10186   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
10187   referencing REG, it is dead.  If we hit a SET referencing REG, it is
10188   live.  Otherwise, see if it is live or dead at the start of the basic
10189   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
10190   must be assumed to be always live.  */
10191
10192static int
10193reg_dead_at_p (reg, insn)
10194     rtx reg;
10195     rtx insn;
10196{
10197  int block, i;
10198
10199  /* Set variables for reg_dead_at_p_1.  */
10200  reg_dead_regno = REGNO (reg);
10201  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10202					? HARD_REGNO_NREGS (reg_dead_regno,
10203							    GET_MODE (reg))
10204					: 1);
10205
10206  reg_dead_flag = 0;
10207
10208  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
10209  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10210    {
10211      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10212	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10213	  return 0;
10214    }
10215
10216  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10217     beginning of function.  */
10218  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
10219       insn = prev_nonnote_insn (insn))
10220    {
10221      note_stores (PATTERN (insn), reg_dead_at_p_1);
10222      if (reg_dead_flag)
10223	return reg_dead_flag == 1 ? 1 : 0;
10224
10225      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10226	return 1;
10227    }
10228
10229  /* Get the basic block number that we were in.  */
10230  if (insn == 0)
10231    block = 0;
10232  else
10233    {
10234      for (block = 0; block < n_basic_blocks; block++)
10235	if (insn == basic_block_head[block])
10236	  break;
10237
10238      if (block == n_basic_blocks)
10239	return 0;
10240    }
10241
10242  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10243    if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10244	& ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
10245      return 0;
10246
10247  return 1;
10248}
10249
10250/* Note hard registers in X that are used.  This code is similar to
10251   that in flow.c, but much simpler since we don't care about pseudos.  */
10252
10253static void
10254mark_used_regs_combine (x)
10255     rtx x;
10256{
10257  register RTX_CODE code = GET_CODE (x);
10258  register int regno;
10259  int i;
10260
10261  switch (code)
10262    {
10263    case LABEL_REF:
10264    case SYMBOL_REF:
10265    case CONST_INT:
10266    case CONST:
10267    case CONST_DOUBLE:
10268    case PC:
10269    case ADDR_VEC:
10270    case ADDR_DIFF_VEC:
10271    case ASM_INPUT:
10272#ifdef HAVE_cc0
10273    /* CC0 must die in the insn after it is set, so we don't need to take
10274       special note of it here.  */
10275    case CC0:
10276#endif
10277      return;
10278
10279    case CLOBBER:
10280      /* If we are clobbering a MEM, mark any hard registers inside the
10281	 address as used.  */
10282      if (GET_CODE (XEXP (x, 0)) == MEM)
10283	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10284      return;
10285
10286    case REG:
10287      regno = REGNO (x);
10288      /* A hard reg in a wide mode may really be multiple registers.
10289	 If so, mark all of them just like the first.  */
10290      if (regno < FIRST_PSEUDO_REGISTER)
10291	{
10292	  /* None of this applies to the stack, frame or arg pointers */
10293	  if (regno == STACK_POINTER_REGNUM
10294#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10295	      || regno == HARD_FRAME_POINTER_REGNUM
10296#endif
10297#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10298	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10299#endif
10300	      || regno == FRAME_POINTER_REGNUM)
10301	    return;
10302
10303	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10304	  while (i-- > 0)
10305	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10306	}
10307      return;
10308
10309    case SET:
10310      {
10311	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10312	   the address.  */
10313	register rtx testreg = SET_DEST (x);
10314
10315	while (GET_CODE (testreg) == SUBREG
10316	       || GET_CODE (testreg) == ZERO_EXTRACT
10317	       || GET_CODE (testreg) == SIGN_EXTRACT
10318	       || GET_CODE (testreg) == STRICT_LOW_PART)
10319	  testreg = XEXP (testreg, 0);
10320
10321	if (GET_CODE (testreg) == MEM)
10322	  mark_used_regs_combine (XEXP (testreg, 0));
10323
10324	mark_used_regs_combine (SET_SRC (x));
10325	return;
10326      }
10327    }
10328
10329  /* Recursively scan the operands of this expression.  */
10330
10331  {
10332    register char *fmt = GET_RTX_FORMAT (code);
10333
10334    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10335      {
10336        if (fmt[i] == 'e')
10337	  mark_used_regs_combine (XEXP (x, i));
10338        else if (fmt[i] == 'E')
10339          {
10340            register int j;
10341
10342            for (j = 0; j < XVECLEN (x, i); j++)
10343              mark_used_regs_combine (XVECEXP (x, i, j));
10344          }
10345      }
10346  }
10347}
10348
10349
10350/* Remove register number REGNO from the dead registers list of INSN.
10351
10352   Return the note used to record the death, if there was one.  */
10353
10354rtx
10355remove_death (regno, insn)
10356     int regno;
10357     rtx insn;
10358{
10359  register rtx note = find_regno_note (insn, REG_DEAD, regno);
10360
10361  if (note)
10362    {
10363      reg_n_deaths[regno]--;
10364      remove_note (insn, note);
10365    }
10366
10367  return note;
10368}
10369
10370/* For each register (hardware or pseudo) used within expression X, if its
10371   death is in an instruction with cuid between FROM_CUID (inclusive) and
10372   TO_INSN (exclusive), put a REG_DEAD note for that register in the
10373   list headed by PNOTES.
10374
10375   This is done when X is being merged by combination into TO_INSN.  These
10376   notes will then be distributed as needed.  */

static void
move_deaths (x, from_cuid, to_insn, pnotes)
     rtx x;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
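      /* Insns created by combine itself have UIDs above MAX_UID_CUID and
	 hence no cuid of their own; stepping to the nearest neighbors that
	 predate combine gives insns whose cuids we can compare below.  */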
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */
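	  /* For example, a REG_DEAD note for a DImode value occupying hard
	     regs 2 and 3, encountered when X is only (reg:SI 2), leaves
	     behind a new REG_DEAD note for (reg:SI 3) on WHERE_DEAD.  */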

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  != GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx (EXPR_LIST, REG_DEAD,
			       gen_rtx (REG, reg_raw_mode[i], i),
			       REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;
	      rtx oldnotes = 0;

	      for (i = regno + 1; i < ourend; i++)
		move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
			     from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);

	  reg_n_deaths[regno]++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of
	 every register in the expression is used by this insn, so
	 remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
    }
}

/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

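      /* The half-open ranges [regno, endregno) and [tregno, endtregno)
	 overlap exactly when each one starts before the other ends.  */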
      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}

/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from combining the
   insns that included FROM_INSN (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */
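
/* For instance, a REG_DEAD note whose register is still used by I3 simply
   moves to I3, while one whose register no longer appears in the combined
   pattern forces a backward scan for the last remaining use (see the
   REG_DEAD case below).  */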

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if one of them is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for registers that were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3, in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && reg_n_refs[REGNO (XEXP (note, 0))] == 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    reg_n_refs[REGNO (XEXP (note, 0))] = 3;

	  if (place == 0)
	    {
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);

		      /* Verify that it was the set, and not a clobber that
			 modified the register.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
			      || (GET_CODE (SET_DEST (set)) == SUBREG
				  && rtx_equal_p (XEXP (note, 0),
						  XEXP (SET_DEST (set), 0)))))
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register that formerly died in i3 and was not used
			 by i2, but that now no longer dies in i3 and is
			 used in i2 without dying there, and PLACE lies
			 between i2 and i3, then we may need to move a link
			 from PLACE to i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death note there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
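	      /* The insn emitted here is just (use (reg ...)), placed
		 immediately after TEM, the insn (usually a CODE_LABEL)
		 where the backward scan stopped.  */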
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field assignment.
	     Therefore, we must also omit the note if the register is the
	     target of a bitfield assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */
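	      /* For example, if a DImode value in hard regs 0 and 1 dies
		 here but PLACE mentions only reg 0, we emit
		 (use (reg:SI 1)) just before PLACE and attach the REG_DEAD
		 note for reg 1 to that new USE insn.  */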

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE insn for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx (USE, VOIDmode,
							   piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
							 piece,
							 REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (note, 0))]++;

	  REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
					XEXP (note, 0), REG_NOTES (place2));
	}
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */
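
/* Schematically: if I2 used to feed I3 through some (reg N) but the
   combined I3 no longer mentions that register, the link naming I2 must
   move forward to the next insn in the block, if any, that still uses
   the register.  */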

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we were to replace I3, I2, and I1 by I3 and I2; but in that case the
	 destination of I2 remains unchanged as well.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || basic_block_head[this_basic_block + 1] != insn));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}

void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}