combine.c revision 132718
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
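
   For illustration, here is a hypothetical sequence (register numbers
   made up) that yields such a triple:

	A: (set (reg 100) (mem (reg 99)))
	B: (set (reg 101) (plus (reg 100) (const_int 4)))
	C: (set (mem (reg 98)) (reg 101))

   C has a LOG_LINK back to B and B back to A, so we first try the pair
   (B into C) and then the triple (A and B into C).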

   There are no LOG_LINKS for uses of CC0.  None are needed, because
   the insn that sets CC0 is always immediately before the insn that
   tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn that
   explicitly uses CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s)' values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"

#ifndef SHIFT_COUNT_TRUNCATED
#define SHIFT_COUNT_TRUNCATED 0
#endif

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
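
/* For example (a sketch, assuming BITS_PER_WORD and HOST_BITS_PER_WIDE_INT
   are both 64): `x << 64' is undefined behavior in C because the shift
   count equals the operand's width, while `(x << 63) << 1' is well defined
   and yields 0, the intended result of shifting a full word out.  */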

#define nonzero_bits(X, M) \
  cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0)

#define num_sign_bit_copies(X, M) \
  cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0)
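
/* A typical use of these wrappers (a sketch, not a quotation from this
   file): an AND with a constant mask is redundant when the mask covers
   every possibly-nonzero bit, e.g. when

	(nonzero_bits (x, mode) & ~(unsigned HOST_WIDE_INT) 7) == 0

   holds, (and X (const_int 7)) cannot clear any bit of X, so X may be
   used unmasked.  Compare the redundant-`and' remark in the comments on
   the reg_last_set_value tables below.  */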

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
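
/* An illustration of those validity rules (hypothetical numbers): if
   pseudo 100 is set exactly once in the function, its recorded value may
   appear inside the value recorded for any other register.  If pseudo 101
   is set again after a later label, reg_last_set_label[101] no longer
   matches the old tick, so a stale recorded expression that mentions
   pseudo 101 has it replaced by (clobber (const_int 0)), which can never
   match.  */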

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set nonzero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  Keeping
   it zero during the computation prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a list of changes to be undone, chained through the `next'
   fields of the struct undo records above.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last_arrays (void);
static void setup_incoming_promotions (void);
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx, int);
static rtx expand_compound_operation (rtx);
static rtx expand_field_assignment (rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
			    rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx force_to_mode (rtx, enum machine_mode,
			  unsigned HOST_WIDE_INT, rtx, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
				   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
						   rtx, enum machine_mode,
						   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
					     enum machine_mode,
					     unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
						enum machine_mode,
						unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
					  enum machine_mode, unsigned int);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			    HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const	(rtx, enum rtx_code, enum machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static rtx gen_binary (enum rtx_code, enum machine_mode, rtx, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (rtx);
static int use_crosses_set_p (rtx, int);
static void reg_dead_at_p_1 (rtx, rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static int insn_cuid (rtx);
static void record_promoted_value (rtx, rtx);
static rtx reversed_comparison (rtx, enum machine_mode, rtx, rtx);
static enum rtx_code combine_reversed_comparison_code (rtx);

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
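
/* A sketch of how these records are used (undo_all and undo_commit are
   defined later in this file; the lines below are illustrative, not a
   quotation):

     SUBST (SET_SRC (pat), new_src);	record old SET_SRC, install new
     ...
     if (! recognized)
       undo_all ();			restore each *where from old_contents

   Spent records are recycled through undobuf.frees rather than freed;
   combine_instructions releases that free list when it finishes.  */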

/* Similar to SUBST, but NEWVAL is an int expression.  Note that using this
   to substitute for a HOST_WIDE_INT value (including the contents of a
   CONST_INT) is not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT));
  reg_sign_bit_copies = xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = xmalloc (nregs * sizeof (rtx));
  reg_last_set = xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = xmalloc (nregs * sizeof (int));
  reg_last_set_label = xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = xmalloc (nregs * sizeof (char));
  reg_last_set_mode = xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits = xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies = xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also record any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays (void)
{
  unsigned int nregs = combine_max_regno;

  memset (reg_last_death, 0, nregs * sizeof (rtx));
  memset (reg_last_set, 0, nregs * sizeof (rtx));
  memset (reg_last_set_value, 0, nregs * sizeof (rtx));
  memset (reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset (reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset (reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset (reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	/* Check whether this register can hold an incoming pointer
	   argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	   numbers, so translate if necessary due to register windows.  */
	if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	    && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
	  {
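	    /* The (clobber (const_int 0)) operand in the value recorded
	       below is combine's standard placeholder for an unknown
	       quantity (see the table comments above); only the
	       ZERO_EXTEND/SIGN_EXTEND wrapper and its machine mode carry
	       information about the promotion.  */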
	    record_value_for_reg
	      (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
					   : SIGN_EXTEND),
					  GET_MODE (reg),
					  gen_rtx_CLOBBER (mode, const0_rtx)));
	  }
    }
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
				  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
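	  /* For example, on such a machine a QImode load of
	     (const_int 128) really leaves -128 in the word, so we record
	     the sign-extended form: assuming a 32-bit HOST_WIDE_INT,
	     0xffffff80 rather than 0x80.  */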
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
	       rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
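  /* Nonzero when INSN, SUCC (if given) and I3 are consecutive active
     insns, so that nothing executable lies between the insns being
     merged; several of the tests below can be relaxed in that case.  */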
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  rtx note;

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
		      || INTVAL (XEXP (note, 0)) <= 0)
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
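
/* For instance (hypothetical RTL), given

	INSN: (set (reg 100) (plus (reg 101) (const_int 4)))
	I3:   (set (mem (reg 102)) (reg 100))

   with no intervening uses or sets, can_combine_p returns 1 with
   *PDEST = (reg 100) and *PSRC = (plus (reg 101) (const_int 4)), and
   try_combine will then attempt to recognize
   (set (mem (reg 102)) (plus (reg 101) (const_int 4))) as I3.  */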

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
		  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
	 above.  We don't want to prevent pseudos from being combined
	 into the address of a MEM, so only prevent the combination if
	 i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
	   (GET_CODE (inner_dest) != MEM
	    || rtx_equal_p (i2dest, inner_dest)
	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
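
/* Illustrative examples (not taken from the sources): contains_muldiv
   returns 0 for (plus (mult (reg 100) (const_int 4)) (reg 101)), since
   that multiplication is by a power of two, but returns 1 for
   (plus (div (reg 100) (reg 101)) (const_int 1)) and for
   (mult (reg 100) (reg 101)), whose second operand is not a CONST_INT
   power of two.  */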

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, we may abort in reload.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)]
	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)]
	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
	*loc = XEXP (*loc, 1);
      else
	loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1474  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1475  int i1_feeds_i3 = 0;
1476  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1477  rtx new_i3_notes, new_i2_notes;
1478  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
1479  int i3_subst_into_i2 = 0;
1480  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
1481  int have_mult = 0;
1482
1483  int maxreg;
1484  rtx temp;
1485  rtx link;
1486  int i;
1487
1488  /* Exit early if one of the insns involved can't be used for
1489     combinations.  */
1490  if (cant_combine_insn_p (i3)
1491      || cant_combine_insn_p (i2)
1492      || (i1 && cant_combine_insn_p (i1))
1493      /* We also can't do anything if I3 has a
1494	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1495	 libcall.  */
1496#if 0
1497      /* ??? This gives worse code, and appears to be unnecessary, since no
1498	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1499      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1500#endif
1501      )
1502    return 0;
1503
1504  combine_attempts++;
1505  undobuf.other_insn = 0;
1506
1507  /* Reset the hard register usage information.  */
1508  CLEAR_HARD_REG_SET (newpat_used_regs);
1509
1510  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1511     code below, set I1 to be the earlier of the two insns.  */
1512  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1513    temp = i1, i1 = i2, i2 = temp;
1514
1515  added_links_insn = 0;
1516
1517  /* First check for one important special-case that the code below will
1518     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1519     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1520     we may be able to replace that destination with the destination of I3.
1521     This occurs in the common code where we compute both a quotient and
1522     remainder into a structure, in which case we want to do the computation
1523     directly into the structure to avoid register-register copies.
1524
1525     Note that this case handles both multiple sets in I2 and also
1526     cases where I2 has a number of CLOBBERs or PARALLELs.
1527
1528     We make very conservative checks below and only try to handle the
1529     most common cases of this.  For example, we only handle the case
1530     where I2 and I3 are adjacent to avoid making difficult register
1531     usage tests.  */
1532
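  /* A concrete sketch of the case described above: with I2 the divmod

	(parallel [(set (reg:SI 65) (div:SI (reg:SI 60) (reg:SI 61)))
		   (set (reg:SI 66) (mod:SI (reg:SI 60) (reg:SI 61)))])

     and I3 the copy (set (mem:SI (reg:SI 70)) (reg:SI 65)) in which
     (reg:SI 65) dies, the code below substitutes (mem:SI (reg:SI 70))
     for (reg:SI 65) as the first destination in the PARALLEL and makes
     that PARALLEL the new pattern for I3, so the quotient is computed
     directly into the memory slot.  */
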
1533  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1534      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1535      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1536      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1537      && GET_CODE (PATTERN (i2)) == PARALLEL
1538      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1539      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1540	 below would need to check what is inside (and reg_overlap_mentioned_p
1541	 doesn't support those codes anyway).  Don't allow those destinations;
1542	 the resulting insn isn't likely to be recognized anyway.  */
1543      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1544      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1545      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1546				    SET_DEST (PATTERN (i3)))
1547      && next_real_insn (i2) == i3)
1548    {
1549      rtx p2 = PATTERN (i2);
1550
1551      /* Make sure that the destination of I3,
1552	 which we are going to substitute into one output of I2,
1553	 is not used within another output of I2.  We must avoid making this:
1554	 (parallel [(set (mem (reg 69)) ...)
1555		    (set (reg 69) ...)])
1556	 which is not well-defined as to order of actions.
1557	 (Besides, reload can't handle output reloads for this.)
1558
1559	 The problem can also happen if the dest of I3 is a memory ref,
1560	 if another dest in I2 is an indirect memory ref.  */
1561      for (i = 0; i < XVECLEN (p2, 0); i++)
1562	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1563	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1564	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1565					SET_DEST (XVECEXP (p2, 0, i))))
1566	  break;
1567
1568      if (i == XVECLEN (p2, 0))
1569	for (i = 0; i < XVECLEN (p2, 0); i++)
1570	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1571	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1572	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1573	    {
1574	      combine_merges++;
1575
1576	      subst_insn = i3;
1577	      subst_low_cuid = INSN_CUID (i2);
1578
1579	      added_sets_2 = added_sets_1 = 0;
1580	      i2dest = SET_SRC (PATTERN (i3));
1581
1582	      /* Replace the dest in I2 with our dest and make the resulting
1583		 insn the new pattern for I3.  Then skip to where we
1584		 validate the pattern.  Everything was set up above.  */
1585	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1586		     SET_DEST (PATTERN (i3)));
1587
1588	      newpat = p2;
1589	      i3_subst_into_i2 = 1;
1590	      goto validate_replacement;
1591	    }
1592    }
1593
1594  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1595     one of those words to another constant, merge them by making a new
1596     constant.  */
1597  if (i1 == 0
1598      && (temp = single_set (i2)) != 0
1599      && (GET_CODE (SET_SRC (temp)) == CONST_INT
1600	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1601      && GET_CODE (SET_DEST (temp)) == REG
1602      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1603      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1604      && GET_CODE (PATTERN (i3)) == SET
1605      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1606      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1607      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1608      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1609      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1610    {
1611      HOST_WIDE_INT lo, hi;
1612
1613      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1614	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1615      else
1616	{
1617	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1618	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1619	}
1620
1621      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1622	{
1623	  /* We don't handle the case of the target word being wider
1624	     than a host wide int.  */
1625	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1626	    abort ();
1627
1628	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1629	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1630		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1631	}
1632      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1633	hi = INTVAL (SET_SRC (PATTERN (i3)));
1634      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1635	{
1636	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1637			     >> (HOST_BITS_PER_WIDE_INT - 1));
1638
1639	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1640		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1641	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1642		 (INTVAL (SET_SRC (PATTERN (i3)))));
1643	  if (hi == sign)
1644	    hi = lo < 0 ? -1 : 0;
1645	}
1646      else
1647	/* We don't handle the case of the higher word not fitting
1648	   entirely in either hi or lo.  */
1649	abort ();
1650
1651      combine_merges++;
1652      subst_insn = i3;
1653      subst_low_cuid = INSN_CUID (i2);
1654      added_sets_2 = added_sets_1 = 0;
1655      i2dest = SET_DEST (temp);
1656
1657      SUBST (SET_SRC (temp),
1658	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1659
1660      newpat = PATTERN (i2);
1661      goto validate_replacement;
1662    }
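
  /* A worked instance of the merge above, assuming a little-endian
     target with 32-bit words and a 64-bit HOST_WIDE_INT: if I2 is
     (set (reg:DI 65) (const_int 0)) and I3 is
     (set (subreg:SI (reg:DI 65) 0) (const_int 42)), then LO becomes 42,
     HI stays 0, and the pair merges into the single insn
     (set (reg:DI 65) (const_int 42)).  */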
1663
1664#ifndef HAVE_cc0
1665  /* If we have no I1 and I2 looks like:
1666	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1667		   (set Y OP)])
1668     make up a dummy I1 that is
1669	(set Y OP)
1670     and change I2 to be
1671        (set (reg:CC X) (compare:CC Y (const_int 0)))
1672
1673     (We can ignore any trailing CLOBBERs.)
1674
1675     This undoes a previous combination and allows us to match a branch-and-
1676     decrement insn.  */
1677
1678  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1679      && XVECLEN (PATTERN (i2), 0) >= 2
1680      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1681      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1682	  == MODE_CC)
1683      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1684      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1685      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1686      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1687      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1688		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1689    {
1690      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1691	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1692	  break;
1693
1694      if (i == 1)
1695	{
1696	  /* We make I1 with the same INSN_UID as I2.  This gives it
1697	     the same INSN_CUID for value tracking.  Our fake I1 will
1698	     never appear in the insn stream so giving it the same INSN_UID
1699	     as I2 will not cause a problem.  */
1700
1701	  i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1702			     BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
1703			     XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1704			     NULL_RTX);
1705
1706	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1707	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1708		 SET_DEST (PATTERN (i1)));
1709	}
1710    }
1711#endif
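
  /* For instance (a sketch): if a previous combination left I2 as

	(parallel [(set (reg:CC 100) (compare:CC (plus:SI (reg:SI 60)
							  (const_int -1))
						 (const_int 0)))
		   (set (reg:SI 60) (plus:SI (reg:SI 60) (const_int -1)))])

     the code above fabricates I1 as the decrement and narrows I2 to
     (set (reg:CC 100) (compare:CC (reg:SI 60) (const_int 0))), so that
     a decrement-and-branch pattern can later match I2 together with the
     branch in I3.  */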
1712
1713  /* Verify that I2 and I1 are valid for combining.  */
1714  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1715      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1716    {
1717      undo_all ();
1718      return 0;
1719    }
1720
1721  /* Record whether I2DEST is used in I2SRC and similarly for the other
1722     cases.  Knowing this will help in register status updating below.  */
1723  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1724  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1725  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1726
1727  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1728     in I2SRC.  */
1729  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1730
1731  /* Ensure that I3's pattern can be the destination of combines.  */
1732  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1733			  i1 && i2dest_in_i1src && i1_feeds_i3,
1734			  &i3dest_killed))
1735    {
1736      undo_all ();
1737      return 0;
1738    }
1739
1740  /* See if any of the insns is a MULT operation.  Unless one is, we will
1741     reject a combination that is, since it must be slower.  Be conservative
1742     here.  */
1743  if (GET_CODE (i2src) == MULT
1744      || (i1 != 0 && GET_CODE (i1src) == MULT)
1745      || (GET_CODE (PATTERN (i3)) == SET
1746	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1747    have_mult = 1;
1748
1749  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1750     We used to do this EXCEPT in one case: I3 has a post-inc in an
1751     output operand.  However, that exception can give rise to insns like
1752	mov r3,(r3)+
1753     which is a famous insn on the PDP-11 where the value of r3 used as the
1754     source was model-dependent.  Avoid this sort of thing.  */
1755
1756#if 0
1757  if (!(GET_CODE (PATTERN (i3)) == SET
1758	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1759	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1760	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1761	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1762    /* It's not the exception.  */
1763#endif
1764#ifdef AUTO_INC_DEC
1765    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1766      if (REG_NOTE_KIND (link) == REG_INC
1767	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1768	      || (i1 != 0
1769		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1770	{
1771	  undo_all ();
1772	  return 0;
1773	}
1774#endif
1775
1776  /* See if the SETs in I1 or I2 need to be kept around in the merged
1777     instruction: whenever the value set there is still needed past I3.
1778     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1779
1780     For the SET in I1, we have two cases:  If I1 and I2 independently
1781     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1782     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1783     in I1 needs to be kept around unless I1DEST dies or is set in either
1784     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1785     I1DEST.  If so, we know I1 feeds into I2.  */
1786
1787  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1788
1789  added_sets_1
1790    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1791	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
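
  /* Example of the distinction (a sketch): with I1 (set (reg 64) ...),
     I2 (set (reg 65) (plus (reg 64) (const_int 1))) and I3 using only
     (reg 65), I1 feeds I2, so I1's SET may be dropped if (reg 64) dies
     in either I2 or I3.  If instead I3 used (reg 64) directly and I2SRC
     did not mention it, I1 would feed I3 independently and only a death
     in I3 itself would let us drop the SET.  */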
1792
1793  /* If the set in I2 needs to be kept around, we must make a copy of
1794     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1795     PATTERN (I2), we are only substituting for the original I1DEST, not into
1796     an already-substituted copy.  This also prevents making self-referential
1797     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1798     I2DEST.  */
1799
1800  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1801	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1802	   : PATTERN (i2));
1803
1804  if (added_sets_2)
1805    i2pat = copy_rtx (i2pat);
1806
1807  combine_merges++;
1808
1809  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1810
1811  maxreg = max_reg_num ();
1812
1813  subst_insn = i3;
1814
1815  /* It is possible that the source of I2 or I1 may be performing an
1816     unneeded operation, such as a ZERO_EXTEND of something that is known
1817     to have the high part zero.  Handle that case by letting subst look at
1818     the innermost one of them.
1819
1820     Another way to do this would be to have a function that tries to
1821     simplify a single insn instead of merging two or more insns.  We don't
1822     do this because of the potential of infinite loops and because
1823     of the potential extra memory required.  However, doing it the way
1824     we are is a bit of a kludge and doesn't catch all cases.
1825
1826     But only do this if -fexpensive-optimizations since it slows things down
1827     and doesn't usually win.  */
1828
1829  if (flag_expensive_optimizations)
1830    {
1831      /* Pass pc_rtx so no substitutions are done, just simplifications.
1832	 The cases that we are interested in here do not involve the few
1833	 cases where is_replaced is checked.  */
1834      if (i1)
1835	{
1836	  subst_low_cuid = INSN_CUID (i1);
1837	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1838	}
1839      else
1840	{
1841	  subst_low_cuid = INSN_CUID (i2);
1842	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1843	}
1844    }
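
  /* E.g. (a sketch): if I2SRC is (zero_extend:SI (subreg:QI (reg 60) 0))
     and nonzero_bits shows that the upper bits of (reg 60) are already
     zero, the subst call above can simplify I2SRC to (reg:SI 60) before
     any real merging is attempted.  */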
1845
1846#ifndef HAVE_cc0
1847  /* Many machines that don't use CC0 have insns that can both perform an
1848     arithmetic operation and set the condition code.  These operations will
1849     be represented as a PARALLEL with the first element of the vector
1850     being a COMPARE of an arithmetic operation with the constant zero.
1851     The second element of the vector will set some pseudo to the result
1852     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1853     match such a pattern and so will generate an extra insn.   Here we test
1854     for this case, where both the comparison and the operation result are
1855     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1856     I2SRC.  Later we will make the PARALLEL that contains I2.  */
1857
1858  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1859      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1860      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1861      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1862    {
1863#ifdef SELECT_CC_MODE
1864      rtx *cc_use;
1865      enum machine_mode compare_mode;
1866#endif
1867
1868      newpat = PATTERN (i3);
1869      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1870
1871      i2_is_used = 1;
1872
1873#ifdef SELECT_CC_MODE
1874      /* See if a COMPARE with the operand we substituted in should be done
1875	 with the mode that is currently being used.  If not, do the same
1876	 processing we do in `subst' for a SET; namely, if the destination
1877	 is used only once, try to replace it with a register of the proper
1878	 mode and also replace the COMPARE.  */
1879      if (undobuf.other_insn == 0
1880	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1881					&undobuf.other_insn))
1882	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1883					      i2src, const0_rtx))
1884	      != GET_MODE (SET_DEST (newpat))))
1885	{
1886	  unsigned int regno = REGNO (SET_DEST (newpat));
1887	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1888
1889	  if (regno < FIRST_PSEUDO_REGISTER
1890	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1891		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1892	    {
1893	      if (regno >= FIRST_PSEUDO_REGISTER)
1894		SUBST (regno_reg_rtx[regno], new_dest);
1895
1896	      SUBST (SET_DEST (newpat), new_dest);
1897	      SUBST (XEXP (*cc_use, 0), new_dest);
1898	      SUBST (SET_SRC (newpat),
1899		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1900	    }
1901	  else
1902	    undobuf.other_insn = 0;
1903	}
1904#endif
1905    }
1906  else
1907#endif
1908    {
1909      n_occurrences = 0;		/* `subst' counts here */
1910
1911      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1912	 need to make a unique copy of I2SRC each time we substitute it
1913	 to avoid self-referential rtl.  */
1914
1915      subst_low_cuid = INSN_CUID (i2);
1916      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1917		      ! i1_feeds_i3 && i1dest_in_i1src);
1918      substed_i2 = 1;
1919
1920      /* Record whether i2's body now appears within i3's body.  */
1921      i2_is_used = n_occurrences;
1922    }
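
  /* The ordinary substitution above, illustrated (a sketch): with I2
     (set (reg:SI 65) (plus:SI (reg:SI 64) (const_int 4))) and I3
     (set (mem:SI (reg:SI 65)) (reg:SI 66)), NEWPAT becomes

	(set (mem:SI (plus:SI (reg:SI 64) (const_int 4))) (reg:SI 66))

     which may now match an offset-addressed store pattern.  */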
1923
1924  /* If we already got a failure, don't try to do more.  Otherwise,
1925     try to substitute in I1 if we have it.  */
1926
1927  if (i1 && GET_CODE (newpat) != CLOBBER)
1928    {
1929      /* Before we can do this substitution, we must redo the test done
1930	 above (see detailed comments there) that ensures that I1DEST
1931	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1932
1933      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1934			      0, (rtx*) 0))
1935	{
1936	  undo_all ();
1937	  return 0;
1938	}
1939
1940      n_occurrences = 0;
1941      subst_low_cuid = INSN_CUID (i1);
1942      newpat = subst (newpat, i1dest, i1src, 0, 0);
1943      substed_i1 = 1;
1944    }
1945
1946  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
1947     to count all the ways that I2SRC and I1SRC can be used.  */
1948  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1949       && i2_is_used + added_sets_2 > 1)
1950      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1951	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1952	      > 1))
1953      /* Fail if we tried to make a new register (we used to abort, but there's
1954	 really no reason to).  */
1955      || max_reg_num () != maxreg
1956      /* Fail if we couldn't do something and have a CLOBBER.  */
1957      || GET_CODE (newpat) == CLOBBER
1958      /* Fail if this new pattern is a MULT and we didn't have one before
1959	 at the outer level.  */
1960      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1961	  && ! have_mult))
1962    {
1963      undo_all ();
1964      return 0;
1965    }
1966
1967  /* If the actions of the earlier insns must be kept
1968     in addition to substituting them into the latest one,
1969     we must make a new PARALLEL for the latest insn
1970     to hold the additional SETs.  */
1971
1972  if (added_sets_1 || added_sets_2)
1973    {
1974      combine_extras++;
1975
1976      if (GET_CODE (newpat) == PARALLEL)
1977	{
1978	  rtvec old = XVEC (newpat, 0);
1979	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1980	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1981	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
1982		  sizeof (old->elem[0]) * old->num_elem);
1983	}
1984      else
1985	{
1986	  rtx old = newpat;
1987	  total_sets = 1 + added_sets_1 + added_sets_2;
1988	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1989	  XVECEXP (newpat, 0, 0) = old;
1990	}
1991
1992      if (added_sets_1)
1993	XVECEXP (newpat, 0, --total_sets)
1994	  = (GET_CODE (PATTERN (i1)) == PARALLEL
1995	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1996
1997      if (added_sets_2)
1998	{
1999	  /* If there is no I1, use I2's body as is.  We used to also not do
2000	     the subst call below if I2 was substituted into I3,
2001	     but that could lose a simplification.  */
2002	  if (i1 == 0)
2003	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2004	  else
2005	    /* See comment where i2pat is assigned.  */
2006	    XVECEXP (newpat, 0, --total_sets)
2007	      = subst (i2pat, i1dest, i1src, 0, 0);
2008	}
2009    }
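
  /* After this step (a sketch): if NEWPAT was a lone SET and I2's SET
     must survive, NEWPAT is now

	(parallel [(set ...the substituted I3 pattern...)
		   (set (reg 65) ...I2SRC...)])

     with TOTAL_SETS counting down as each retained SET is filled in.  */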
2010
2011  /* We come here when we are replacing a destination in I2 with the
2012     destination of I3.  */
2013 validate_replacement:
2014
2015  /* Note which hard regs this insn has as inputs.  */
2016  mark_used_regs_combine (newpat);
2017
2018  /* Is the result of combination a valid instruction?  */
2019  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2020
2021  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2022     the second SET's destination is a register that is unused and isn't
2023     marked as an instruction that might trap in an EH region.  In that case,
2024     we just need the first SET.   This can occur when simplifying a divmod
2025     insn.  We *must* test for this case here because the code below that
2026     splits two independent SETs doesn't handle this case correctly when it
2027     updates the register status.  Also check the case where the first
2028     SET's destination is unused.  That would not cause incorrect code, but
2029     does cause an unneeded insn to remain.  */
2030
2031  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2032      && XVECLEN (newpat, 0) == 2
2033      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2034      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2035      && asm_noperands (newpat) < 0)
2036    {
2037      rtx set0 = XVECEXP (newpat, 0, 0);
2038      rtx set1 = XVECEXP (newpat, 0, 1);
2039      rtx note;
2040
2041      if (((GET_CODE (SET_DEST (set1)) == REG
2042	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2043	   || (GET_CODE (SET_DEST (set1)) == SUBREG
2044	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2045	  && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2046	      || INTVAL (XEXP (note, 0)) <= 0)
2047	  && ! side_effects_p (SET_SRC (set1)))
2048	{
2049	  newpat = set0;
2050	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2051	}
2052
2053      else if (((GET_CODE (SET_DEST (set0)) == REG
2054		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2055		|| (GET_CODE (SET_DEST (set0)) == SUBREG
2056		    && find_reg_note (i3, REG_UNUSED,
2057				      SUBREG_REG (SET_DEST (set0)))))
2058	       && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2059		   || INTVAL (XEXP (note, 0)) <= 0)
2060	       && ! side_effects_p (SET_SRC (set0)))
2061	{
2062	  newpat = set1;
2063	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2064
2065	  if (insn_code_number >= 0)
2066	    {
2067	      /* If we will be able to accept this, we have made a
2068		 change to the destination of I3.  This requires us to
2069		 do a few adjustments.  */
2070
2071	      PATTERN (i3) = newpat;
2072	      adjust_for_new_dest (i3);
2073	    }
2074	}
2075    }
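
  /* For instance (a sketch): if NEWPAT came out as

	(parallel [(set (reg:SI 65) (div:SI (reg:SI 60) (reg:SI 61)))
		   (set (reg:SI 66) (mod:SI (reg:SI 60) (reg:SI 61)))])

     but I3 carries a REG_UNUSED note for (reg:SI 66), only the first
     SET is kept and is re-recognized on its own as a plain division.  */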
2076
2077  /* If we were combining three insns and the result is a simple SET
2078     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2079     insns.  There are two ways to do this.  It can be split using a
2080     machine-specific method (like when you have an addition of a large
2081     constant) or by combine in the function find_split_point.  */
2082
2083  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2084      && asm_noperands (newpat) < 0)
2085    {
2086      rtx m_split, *split;
2087      rtx ni2dest = i2dest;
2088
2089      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2090	 use I2DEST as a scratch register will help.  In the latter case,
2091	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
2092
2093      m_split = split_insns (newpat, i3);
2094
2095      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2096	 inputs of NEWPAT.  */
2097
2098      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2099	 possible to try that as a scratch reg.  This would require adding
2100	 more code to make it work though.  */
2101
2102      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2103	{
2104	  /* If I2DEST is a hard register or the only use of a pseudo,
2105	     we can change its mode.  */
2106	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2107	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
2108	      && GET_CODE (i2dest) == REG
2109	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2110		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2111		      && ! REG_USERVAR_P (i2dest))))
2112	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2113				   REGNO (i2dest));
2114
2115	  m_split = split_insns (gen_rtx_PARALLEL
2116				 (VOIDmode,
2117				  gen_rtvec (2, newpat,
2118					     gen_rtx_CLOBBER (VOIDmode,
2119							      ni2dest))),
2120				 i3);
2121	  /* If the split with the mode-changed register didn't work, try
2122	     the original register.  */
2123	  if (! m_split && ni2dest != i2dest)
2124	    {
2125	      ni2dest = i2dest;
2126	      m_split = split_insns (gen_rtx_PARALLEL
2127				     (VOIDmode,
2128				      gen_rtvec (2, newpat,
2129						 gen_rtx_CLOBBER (VOIDmode,
2130								  i2dest))),
2131				     i3);
2132	    }
2133	}
2134
2135      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2136	{
2137	  m_split = PATTERN (m_split);
2138	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2139	  if (insn_code_number >= 0)
2140	    newpat = m_split;
2141	}
2142      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2143	       && (next_real_insn (i2) == i3
2144		   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2145	{
2146	  rtx i2set, i3set;
2147	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2148	  newi2pat = PATTERN (m_split);
2149
2150	  i3set = single_set (NEXT_INSN (m_split));
2151	  i2set = single_set (m_split);
2152
2153	  /* In case we changed the mode of I2DEST, replace it in the
2154	     pseudo-register table here.  We can't do it above in case this
2155	     code doesn't get executed and we do a split the other way.  */
2156
2157	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2158	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2159
2160	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2161
2162	  /* If I2 or I3 has multiple SETs, we won't know how to track
2163	     register status, so don't use these insns.  If I2's destination
2164	     is used between I2 and I3, we also can't use these insns.  */
2165
2166	  if (i2_code_number >= 0 && i2set && i3set
2167	      && (next_real_insn (i2) == i3
2168		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2169	    insn_code_number = recog_for_combine (&newi3pat, i3,
2170						  &new_i3_notes);
2171	  if (insn_code_number >= 0)
2172	    newpat = newi3pat;
2173
2174	  /* It is possible that both insns now set the destination of I3.
2175	     If so, we must show an extra use of it.  */
2176
2177	  if (insn_code_number >= 0)
2178	    {
2179	      rtx new_i3_dest = SET_DEST (i3set);
2180	      rtx new_i2_dest = SET_DEST (i2set);
2181
2182	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2183		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2184		     || GET_CODE (new_i3_dest) == SUBREG)
2185		new_i3_dest = XEXP (new_i3_dest, 0);
2186
2187	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2188		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2189		     || GET_CODE (new_i2_dest) == SUBREG)
2190		new_i2_dest = XEXP (new_i2_dest, 0);
2191
2192	      if (GET_CODE (new_i3_dest) == REG
2193		  && GET_CODE (new_i2_dest) == REG
2194		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2195		REG_N_SETS (REGNO (new_i2_dest))++;
2196	    }
2197	}
2198
2199      /* If we can split it and use I2DEST, go ahead and see if that
2200	 helps things be recognized.  Verify that none of the registers
2201	 are set between I2 and I3.  */
2202      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2203#ifdef HAVE_cc0
2204	  && GET_CODE (i2dest) == REG
2205#endif
2206	  /* We need I2DEST in the proper mode.  If it is a hard register
2207	     or the only use of a pseudo, we can change its mode.  */
2208	  && (GET_MODE (*split) == GET_MODE (i2dest)
2209	      || GET_MODE (*split) == VOIDmode
2210	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2211	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2212		  && ! REG_USERVAR_P (i2dest)))
2213	  && (next_real_insn (i2) == i3
2214	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2215	  /* We can't overwrite I2DEST if its value is still used by
2216	     NEWPAT.  */
2217	  && ! reg_referenced_p (i2dest, newpat))
2218	{
2219	  rtx newdest = i2dest;
2220	  enum rtx_code split_code = GET_CODE (*split);
2221	  enum machine_mode split_mode = GET_MODE (*split);
2222
2223	  /* Get NEWDEST as a register in the proper mode.  We have already
2224	     validated that we can do this.  */
2225	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2226	    {
2227	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2228
2229	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2230		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2231	    }
2232
2233	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2234	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2235	     appeared to be a memory address.  This is a kludge.  */
2236	  if (split_code == MULT
2237	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2238	      && INTVAL (XEXP (*split, 1)) > 0
2239	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2240	    {
2241	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2242					     XEXP (*split, 0), GEN_INT (i)));
2243	      /* Update split_code because we may not have a multiply
2244		 anymore.  */
2245	      split_code = GET_CODE (*split);
2246	    }
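
	  /* A worked instance: (mult:SI (reg 60) (const_int 8)) has
	     exact_log2 (8) == 3, so it is rewritten here as
	     (ashift:SI (reg 60) (const_int 3)), which targets generally
	     recognize in contexts where the MULT form would fail.  */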
2247
2248#ifdef INSN_SCHEDULING
2249	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2250	     be written as a ZERO_EXTEND.  */
2251	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2252	    {
2253#ifdef LOAD_EXTEND_OP
2254	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2255		 what it really is.  */
2256	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2257		  == SIGN_EXTEND)
2258		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2259						    SUBREG_REG (*split)));
2260	      else
2261#endif
2262		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2263						    SUBREG_REG (*split)));
2264	    }
2265#endif
2266
2267	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2268	  SUBST (*split, newdest);
2269	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2270
2271	  /* If the split point was a MULT and we didn't have one before,
2272	     don't use one now.  */
2273	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2274	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2275	}
2276    }
2277
2278  /* Check for a case where we loaded from memory in a narrow mode and
2279     then sign extended it, but we need both registers.  In that case,
2280     we have a PARALLEL with both loads from the same memory location.
2281     We can split this into a load from memory followed by a register-register
2282     copy.  This saves at least one insn, more if register allocation can
2283     eliminate the copy.
2284
2285     We cannot do this if the destination of the first assignment is a
2286     condition code register or cc0.  We eliminate this case by making sure
2287     the SET_DEST and SET_SRC have the same mode.
2288
2289     We cannot do this if the destination of the second assignment is
2290     a register that we have already assumed is zero-extended.  Similarly
2291     for a SUBREG of such a register.  */
2292
2293  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2294	   && GET_CODE (newpat) == PARALLEL
2295	   && XVECLEN (newpat, 0) == 2
2296	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2297	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2298	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2299	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2300	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2301	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2302			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2303	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2304				   INSN_CUID (i2))
2305	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2306	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2307	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2308		 (GET_CODE (temp) == REG
2309		  && reg_nonzero_bits[REGNO (temp)] != 0
2310		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2311		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2312		  && (reg_nonzero_bits[REGNO (temp)]
2313		      != GET_MODE_MASK (word_mode))))
2314	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2315		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2316		     (GET_CODE (temp) == REG
2317		      && reg_nonzero_bits[REGNO (temp)] != 0
2318		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2319		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2320		      && (reg_nonzero_bits[REGNO (temp)]
2321			  != GET_MODE_MASK (word_mode)))))
2322	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2323					 SET_SRC (XVECEXP (newpat, 0, 1)))
2324	   && ! find_reg_note (i3, REG_UNUSED,
2325			       SET_DEST (XVECEXP (newpat, 0, 0))))
2326    {
2327      rtx ni2dest;
2328
2329      newi2pat = XVECEXP (newpat, 0, 0);
2330      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2331      newpat = XVECEXP (newpat, 0, 1);
2332      SUBST (SET_SRC (newpat),
2333	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2334      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2335
2336      if (i2_code_number >= 0)
2337	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2338
2339      if (insn_code_number >= 0)
2340	{
2341	  rtx insn;
2342	  rtx link;
2343
2344	  /* If we will be able to accept this, we have made a change to the
2345	     destination of I3.  This requires us to do a few adjustments.  */
2346	  PATTERN (i3) = newpat;
2347	  adjust_for_new_dest (i3);
2348
2349	  /* I3 now uses what used to be its destination and which is
2350	     now I2's destination.  That means we need a LOG_LINK from
2351	     I3 to I2.  But we used to have one, so we still will.
2352
2353	     However, some later insn might be using I2's dest and have
2354	     a LOG_LINK pointing at I3.  We must remove this link.
2355	     The simplest way to remove the link is to point it at I1,
2356	     which we know will be a NOTE.  */
2357
2358	  for (insn = NEXT_INSN (i3);
2359	       insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2360			|| insn != BB_HEAD (this_basic_block->next_bb));
2361	       insn = NEXT_INSN (insn))
2362	    {
2363	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2364		{
2365		  for (link = LOG_LINKS (insn); link;
2366		       link = XEXP (link, 1))
2367		    if (XEXP (link, 0) == i3)
2368		      XEXP (link, 0) = i1;
2369
2370		  break;
2371		}
2372	    }
2373	}
2374    }
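
  /* Illustration of the case above (a sketch): a NEWPAT of the form

	(parallel [(set (reg:SI 66) (sign_extend:SI (mem:HI (reg 65))))
		   (set (reg:HI 67) (mem:HI (reg 65)))])

     is split so that NEWI2PAT is the extending load and NEWPAT becomes
     a copy of the low part of (reg:SI 66) into (reg:HI 67), removing
     the second memory reference.  */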
2375
2376  /* Similarly, check for a case where we have a PARALLEL of two independent
2377     SETs but we started with three insns.  In this case, we can do the sets
2378     as two separate insns.  This case occurs when some SET allows two
2379     other insns to combine, but the destination of that SET is still live.  */
2380
2381  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2382	   && GET_CODE (newpat) == PARALLEL
2383	   && XVECLEN (newpat, 0) == 2
2384	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2385	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2386	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2387	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2388	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2389	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2390	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2391				   INSN_CUID (i2))
2392	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2393	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2394	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2395	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2396				  XVECEXP (newpat, 0, 0))
2397	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2398				  XVECEXP (newpat, 0, 1))
2399	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2400		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2401    {
2402      /* Normally, it doesn't matter which of the two is done first,
2403	 but it does if one references cc0.  In that case, it has to
2404	 be first.  */
2405#ifdef HAVE_cc0
2406      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2407	{
2408	  newi2pat = XVECEXP (newpat, 0, 0);
2409	  newpat = XVECEXP (newpat, 0, 1);
2410	}
2411      else
2412#endif
2413	{
2414	  newi2pat = XVECEXP (newpat, 0, 1);
2415	  newpat = XVECEXP (newpat, 0, 0);
2416	}
2417
2418      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2419
2420      if (i2_code_number >= 0)
2421	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2422    }
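
  /* Sketch of this case: three insns combine into

	(parallel [(set (reg:SI 65) (plus:SI (reg:SI 60) (reg:SI 61)))
		   (set (reg:SI 66) (minus:SI (reg:SI 62) (reg:SI 63)))])

     where neither SET reads the other's destination; the two SETs are
     then issued as the separate insns I2 and I3.  */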
2423
2424  /* If it still isn't recognized, fail and change things back the way they
2425     were.  */
2426  if ((insn_code_number < 0
2427       /* Is the result a reasonable ASM_OPERANDS?  */
2428       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2429    {
2430      undo_all ();
2431      return 0;
2432    }
2433
2434  /* If we had to change another insn, make sure it is valid also.  */
2435  if (undobuf.other_insn)
2436    {
2437      rtx other_pat = PATTERN (undobuf.other_insn);
2438      rtx new_other_notes;
2439      rtx note, next;
2440
2441      CLEAR_HARD_REG_SET (newpat_used_regs);
2442
2443      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2444					     &new_other_notes);
2445
2446      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2447	{
2448	  undo_all ();
2449	  return 0;
2450	}
2451
2452      PATTERN (undobuf.other_insn) = other_pat;
2453
2454      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2455	 are still valid.  Then add any non-duplicate notes added by
2456	 recog_for_combine.  */
2457      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2458	{
2459	  next = XEXP (note, 1);
2460
2461	  if (REG_NOTE_KIND (note) == REG_UNUSED
2462	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2463	    {
2464	      if (GET_CODE (XEXP (note, 0)) == REG)
2465		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2466
2467	      remove_note (undobuf.other_insn, note);
2468	    }
2469	}
2470
2471      for (note = new_other_notes; note; note = XEXP (note, 1))
2472	if (GET_CODE (XEXP (note, 0)) == REG)
2473	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2474
2475      distribute_notes (new_other_notes, undobuf.other_insn,
2476			undobuf.other_insn, NULL_RTX);
2477    }
2478#ifdef HAVE_cc0
2479  /* If the new I2 sets CC0 and I3 is its user, check that they are still
2480     adjacent; fail if some other insn has come between them.  */
2481  {
2482    rtx p = prev_nonnote_insn (i3);
2483    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2484	&& sets_cc0_p (newi2pat))
2485      {
2486	undo_all ();
2487	return 0;
2488      }
2489  }
2490#endif
2491
2492  /* We now know that we can do this combination.  Merge the insns and
2493     update the status of registers and LOG_LINKS.  */
2494
2495  {
2496    rtx i3notes, i2notes, i1notes = 0;
2497    rtx i3links, i2links, i1links = 0;
2498    rtx midnotes = 0;
2499    unsigned int regno;
2500
2501    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2502       clear them.  */
2503    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2504    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2505    if (i1)
2506      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2507
2508    /* Ensure that we do not have something that should not be shared but
2509       occurs multiple times in the new insns.  Check this by first
2510       resetting all the `used' flags and then copying anything that is shared.  */
2511
2512    reset_used_flags (i3notes);
2513    reset_used_flags (i2notes);
2514    reset_used_flags (i1notes);
2515    reset_used_flags (newpat);
2516    reset_used_flags (newi2pat);
2517    if (undobuf.other_insn)
2518      reset_used_flags (PATTERN (undobuf.other_insn));
2519
2520    i3notes = copy_rtx_if_shared (i3notes);
2521    i2notes = copy_rtx_if_shared (i2notes);
2522    i1notes = copy_rtx_if_shared (i1notes);
2523    newpat = copy_rtx_if_shared (newpat);
2524    newi2pat = copy_rtx_if_shared (newi2pat);
2525    if (undobuf.other_insn)
2526      PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2527
2528    INSN_CODE (i3) = insn_code_number;
2529    PATTERN (i3) = newpat;
2530
2531    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2532      {
2533	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2534
2535	reset_used_flags (call_usage);
2536	call_usage = copy_rtx (call_usage);
2537
2538	if (substed_i2)
2539	  replace_rtx (call_usage, i2dest, i2src);
2540
2541	if (substed_i1)
2542	  replace_rtx (call_usage, i1dest, i1src);
2543
2544	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2545      }
2546
2547    if (undobuf.other_insn)
2548      INSN_CODE (undobuf.other_insn) = other_code_number;
2549
2550    /* We had one special case above where I2 had more than one set and
2551       we replaced a destination of one of those sets with the destination
2552       of I3.  In that case, we have to update LOG_LINKS of insns later
2553       in this basic block.  Note that this (expensive) case is rare.
2554
2555       Also, in this case, we must pretend that all REG_NOTEs for I2
2556       actually came from I3, so that REG_UNUSED notes from I2 will be
2557       properly handled.  */
2558
2559    if (i3_subst_into_i2)
2560      {
2561	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2562	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2563	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2564	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2565	      && ! find_reg_note (i2, REG_UNUSED,
2566				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2567	    for (temp = NEXT_INSN (i2);
2568		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2569			  || BB_HEAD (this_basic_block->next_bb) != temp);
2570		 temp = NEXT_INSN (temp))
2571	      if (temp != i3 && INSN_P (temp))
2572		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2573		  if (XEXP (link, 0) == i2)
2574		    XEXP (link, 0) = i3;
2575
2576	if (i3notes)
2577	  {
2578	    rtx link = i3notes;
2579	    while (XEXP (link, 1))
2580	      link = XEXP (link, 1);
2581	    XEXP (link, 1) = i2notes;
2582	  }
2583	else
2584	  i3notes = i2notes;
2585	i2notes = 0;
2586      }
2587
2588    LOG_LINKS (i3) = 0;
2589    REG_NOTES (i3) = 0;
2590    LOG_LINKS (i2) = 0;
2591    REG_NOTES (i2) = 0;
2592
2593    if (newi2pat)
2594      {
2595	INSN_CODE (i2) = i2_code_number;
2596	PATTERN (i2) = newi2pat;
2597      }
2598    else
2599      {
2600	PUT_CODE (i2, NOTE);
2601	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2602	NOTE_SOURCE_FILE (i2) = 0;
2603      }
2604
2605    if (i1)
2606      {
2607	LOG_LINKS (i1) = 0;
2608	REG_NOTES (i1) = 0;
2609	PUT_CODE (i1, NOTE);
2610	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2611	NOTE_SOURCE_FILE (i1) = 0;
2612      }
2613
2614    /* Get death notes for everything that is now used in either I3 or
2615       I2 and used to die in a previous insn.  If we built two new
2616       patterns, move from I1 to I2 then I2 to I3 so that we get the
2617       proper movement on registers that I2 modifies.  */
2618
2619    if (newi2pat)
2620      {
2621	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2622	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2623      }
2624    else
2625      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2626		   i3, &midnotes);
2627
2628    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2629    if (i3notes)
2630      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX);
2631    if (i2notes)
2632      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX);
2633    if (i1notes)
2634      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX);
2635    if (midnotes)
2636      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2637
2638    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2639       know these are REG_UNUSED and want them to go to the desired insn,
2640       so we always pass it as i3.  We have not counted the notes in
2641       reg_n_deaths yet, so we need to do so now.  */
2642
2643    if (newi2pat && new_i2_notes)
2644      {
2645	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2646	  if (GET_CODE (XEXP (temp, 0)) == REG)
2647	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2648
2649	distribute_notes (new_i2_notes, i2, i2, NULL_RTX);
2650      }
2651
2652    if (new_i3_notes)
2653      {
2654	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2655	  if (GET_CODE (XEXP (temp, 0)) == REG)
2656	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2657
2658	distribute_notes (new_i3_notes, i3, i3, NULL_RTX);
2659      }
2660
2661    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2662       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2663       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2664       in that case, it might delete I2.  Similarly for I2 and I1.
2665       Show an additional death due to the REG_DEAD note we make here.  If
2666       we discard it in distribute_notes, we will decrement it again.  */
2667
2668    if (i3dest_killed)
2669      {
2670	if (GET_CODE (i3dest_killed) == REG)
2671	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2672
2673	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2674	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2675					       NULL_RTX),
2676			    NULL_RTX, i2, NULL_RTX);
2677	else
2678	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2679					       NULL_RTX),
2680			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2681      }
2682
2683    if (i2dest_in_i2src)
2684      {
2685	if (GET_CODE (i2dest) == REG)
2686	  REG_N_DEATHS (REGNO (i2dest))++;
2687
2688	if (newi2pat && reg_set_p (i2dest, newi2pat))
2689	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2690			    NULL_RTX, i2, NULL_RTX);
2691	else
2692	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2693			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2694      }
2695
2696    if (i1dest_in_i1src)
2697      {
2698	if (GET_CODE (i1dest) == REG)
2699	  REG_N_DEATHS (REGNO (i1dest))++;
2700
2701	if (newi2pat && reg_set_p (i1dest, newi2pat))
2702	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2703			    NULL_RTX, i2, NULL_RTX);
2704	else
2705	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2706			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2707      }
2708
2709    distribute_links (i3links);
2710    distribute_links (i2links);
2711    distribute_links (i1links);
2712
2713    if (GET_CODE (i2dest) == REG)
2714      {
2715	rtx link;
2716	rtx i2_insn = 0, i2_val = 0, set;
2717
2718	/* The insn that used to set this register doesn't exist, and
2719	   this life of the register may not exist either.  See if one of
2720	   I3's links points to an insn that sets I2DEST.  If it does,
2721	   that is now the last known value for I2DEST. If we don't update
2722	   this and I2 set the register to a value that depended on its old
2723	   contents, we will get confused.  If this insn is used, things
2724	   will be set correctly in combine_instructions.  */
2725
2726	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2727	  if ((set = single_set (XEXP (link, 0))) != 0
2728	      && rtx_equal_p (i2dest, SET_DEST (set)))
2729	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2730
2731	record_value_for_reg (i2dest, i2_insn, i2_val);
2732
2733	/* If the reg formerly set in I2 died only once and that was in I3,
2734	   zero its use count so it won't make `reload' do any work.  */
2735	if (! added_sets_2
2736	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2737	    && ! i2dest_in_i2src)
2738	  {
2739	    regno = REGNO (i2dest);
2740	    REG_N_SETS (regno)--;
2741	  }
2742      }
2743
2744    if (i1 && GET_CODE (i1dest) == REG)
2745      {
2746	rtx link;
2747	rtx i1_insn = 0, i1_val = 0, set;
2748
2749	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2750	  if ((set = single_set (XEXP (link, 0))) != 0
2751	      && rtx_equal_p (i1dest, SET_DEST (set)))
2752	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2753
2754	record_value_for_reg (i1dest, i1_insn, i1_val);
2755
2756	regno = REGNO (i1dest);
2757	if (! added_sets_1 && ! i1dest_in_i1src)
2758	  REG_N_SETS (regno)--;
2759      }
2760
2761    /* Update reg_nonzero_bits et al for any changes that may have been made
2762       to this insn.  The order of the note_stores calls below is important,
2763       because newi2pat can affect the nonzero_bits of newpat.  */
2764    if (newi2pat)
2765      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2766    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2767
2768    /* Set new_direct_jump_p if a new return or simple jump instruction
2769       has been created.
2770
2771       If I3 is now an unconditional jump, ensure that it has a
2772       BARRIER following it since it may have initially been a
2773       conditional jump.  It may also be the last nonnote insn.  */
2774
2775    if (returnjump_p (i3) || any_uncondjump_p (i3))
2776      {
2777	*new_direct_jump_p = 1;
2778	mark_jump_label (PATTERN (i3), i3, 0);
2779
2780	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2781	    || GET_CODE (temp) != BARRIER)
2782	  emit_barrier_after (i3);
2783      }
2784
2785    if (undobuf.other_insn != NULL_RTX
2786	&& (returnjump_p (undobuf.other_insn)
2787	    || any_uncondjump_p (undobuf.other_insn)))
2788      {
2789	*new_direct_jump_p = 1;
2790
2791	if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
2792	    || GET_CODE (temp) != BARRIER)
2793	  emit_barrier_after (undobuf.other_insn);
2794      }
2795
2796    /* A no-op jump does not need a barrier, but it does need its CFG
2797       cleaned up.  */
2798    if (GET_CODE (newpat) == SET
2799	&& SET_SRC (newpat) == pc_rtx
2800	&& SET_DEST (newpat) == pc_rtx)
2801      *new_direct_jump_p = 1;
2802  }
2803
2804  combine_successes++;
2805  undo_commit ();
2806
2807  if (added_links_insn
2808      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2809      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2810    return added_links_insn;
2811  else
2812    return newi2pat ? i2 : i3;
2813}
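
/* A hedged sketch of how this function is driven (variable names here
   are illustrative): the scanning loop in combine_instructions walks an
   insn's LOG_LINKS and resumes wherever try_combine says to:

	rtx link, next;
	for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
	  if ((next = try_combine (insn, XEXP (link, 0), NULL_RTX,
				   &new_direct_jump_p)) != 0)
	    goto retry;

   The real loop also attempts the three-insn forms described above.  */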
2814
2815/* Undo all the modifications recorded in undobuf.  */
2816
2817static void
2818undo_all (void)
2819{
2820  struct undo *undo, *next;
2821
2822  for (undo = undobuf.undos; undo; undo = next)
2823    {
2824      next = undo->next;
2825      if (undo->is_int)
2826	*undo->where.i = undo->old_contents.i;
2827      else
2828	*undo->where.r = undo->old_contents.r;
2829
2830      undo->next = undobuf.frees;
2831      undobuf.frees = undo;
2832    }
2833
2834  undobuf.undos = 0;
2835}
2836
2837/* We've committed to accepting the changes we made.  Move all
2838   of the undos to the free list.  */
2839
2840static void
2841undo_commit (void)
2842{
2843  struct undo *undo, *next;
2844
2845  for (undo = undobuf.undos; undo; undo = next)
2846    {
2847      next = undo->next;
2848      undo->next = undobuf.frees;
2849      undobuf.frees = undo;
2850    }
2851  undobuf.undos = 0;
2852}
2853
2854
2855/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2856   where we have an arithmetic expression and return that point.  LOC will
2857   be inside INSN.
2858
2859   try_combine will call this function to see if an insn can be split into
2860   two insns.  */
2861
2862static rtx *
2863find_split_point (rtx *loc, rtx insn)
2864{
2865  rtx x = *loc;
2866  enum rtx_code code = GET_CODE (x);
2867  rtx *split;
2868  unsigned HOST_WIDE_INT len = 0;
2869  HOST_WIDE_INT pos = 0;
2870  int unsignedp = 0;
2871  rtx inner = NULL_RTX;
2872
2873  /* First special-case some codes.  */
2874  switch (code)
2875    {
2876    case SUBREG:
2877#ifdef INSN_SCHEDULING
2878      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2879	 point.  */
2880      if (GET_CODE (SUBREG_REG (x)) == MEM)
2881	return loc;
2882#endif
2883      return find_split_point (&SUBREG_REG (x), insn);
2884
2885    case MEM:
2886#ifdef HAVE_lo_sum
2887      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2888	 using LO_SUM and HIGH.  */
2889      if (GET_CODE (XEXP (x, 0)) == CONST
2890	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2891	{
2892	  SUBST (XEXP (x, 0),
2893		 gen_rtx_LO_SUM (Pmode,
2894				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2895				 XEXP (x, 0)));
2896	  return &XEXP (XEXP (x, 0), 0);
2897	}
2898#endif
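
      /* E.g. (a sketch): (mem (symbol_ref "x")) is rewritten as

		(mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

	 and the returned split point is the HIGH subexpression, so the
	 upper half of the address can be computed by a separate insn,
	 as RISC-style targets require.  */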
2899
2900      /* If we have a PLUS whose second operand is a constant and the
2901	 address is not valid, perhaps we can split it up using
2902	 the machine-specific way to split large constants.  We use
2903	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2904	 it will not remain in the result.  */
2905      if (GET_CODE (XEXP (x, 0)) == PLUS
2906	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2907	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2908	{
2909	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2910	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2911				 subst_insn);
2912
2913	  /* This should have produced two insns, each of which sets our
2914	     placeholder.  If the source of the second is a valid address,
2915	     we can put both sources together and make a split point
2916	     in the middle.  */
2917
2918	  if (seq
2919	      && NEXT_INSN (seq) != NULL_RTX
2920	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
2921	      && GET_CODE (seq) == INSN
2922	      && GET_CODE (PATTERN (seq)) == SET
2923	      && SET_DEST (PATTERN (seq)) == reg
2924	      && ! reg_mentioned_p (reg,
2925				    SET_SRC (PATTERN (seq)))
2926	      && GET_CODE (NEXT_INSN (seq)) == INSN
2927	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
2928	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
2929	      && memory_address_p (GET_MODE (x),
2930				   SET_SRC (PATTERN (NEXT_INSN (seq)))))
2931	    {
2932	      rtx src1 = SET_SRC (PATTERN (seq));
2933	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
2934
2935	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2936		 find where in SRC2 it was placed, that can become our
2937		 split point and we can replace this address with SRC2.
2938		 Just try two obvious places.  */
2939
2940	      src2 = replace_rtx (src2, reg, src1);
2941	      split = 0;
2942	      if (XEXP (src2, 0) == src1)
2943		split = &XEXP (src2, 0);
2944	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2945		       && XEXP (XEXP (src2, 0), 0) == src1)
2946		split = &XEXP (XEXP (src2, 0), 0);
2947
2948	      if (split)
2949		{
2950		  SUBST (XEXP (x, 0), src2);
2951		  return split;
2952		}
2953	    }
2954
2955	  /* If that didn't work, perhaps the first operand is complex and
2956	     needs to be computed separately, so make a split point there.
2957	     This will occur on machines that just support REG + CONST
2958	     and have a constant moved through some previous computation.  */
2959
2960	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2961		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2962			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2963			     == 'o')))
2964	    return &XEXP (XEXP (x, 0), 0);
2965	}
2966      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   gen_int_mode (~(mask << pos),
							 mode)),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}
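
      /* Worked example (illustrative, assuming little-endian bit
	 numbering): storing (const_int 5) into a 4-bit field at bit 8,
	     (set (zero_extract:SI D (const_int 4) (const_int 8))
		  (const_int 5)),
	 has len = 4, pos = 8 and mask = 15, and so becomes
	     (set D (ior:SI (and:SI D (const_int -3841)) (const_int 1280)))
	 since ~(15 << 8) = -3841 and 5 << 8 = 1280.  */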

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or nonzero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */
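
	  /* For instance (a sketch): if (set (reg D) (and:SI (reg S)
	     (const_int 4096))) feeds only (ne (reg D) (const_int 0)), the
	     AND is replaced by roughly a one-bit zero_extract of bit 12
	     of S, avoiding the 0x1000 constant.  */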

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) with only one
	     bit of X known to be on, it can be converted into a NEG of a
	     shift.  */
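	  /* E.g. (illustrative): if X is known nonzero only in bit 3,
	     (ne X (const_int 0)) becomes (neg (lshiftrt X (const_int 3))),
	     which is 0 or -1 as STORE_FLAG_VALUE == -1 requires.  */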
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_NEG (mode,
				  gen_rtx_LSHIFTRT (mode,
						    XEXP (SET_SRC (x), 0),
						    GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);

	  /* We can't optimize if either mode is a partial integer
	     mode as we don't know how many bits are significant
	     in those modes.  */
	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
	    break;

	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;

	default:
	  break;
	}

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_AND (mode,
				  gen_rtx_LSHIFTRT
				  (mode, gen_lowpart_for_combine (mode, inner),
				   GEN_INT (pos)),
				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_fmt_ee
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_ASHIFT (mode,
				      gen_lowpart_for_combine (mode, inner),
				      GEN_INT (GET_MODE_BITSIZE (mode)
					       - len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}
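
      /* Concretely (a sketch of the conversion above): in SImode an
	 unsigned 4-bit field at bit 2 becomes
	 (and (lshiftrt INNER (const_int 2)) (const_int 15)), while a
	 signed 8-bit field at bit 0 becomes
	 (ashiftrt (ashift INNER (const_int 24)) (const_int 24)).  */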

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
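      /* E.g. (illustrative): (and (not A) (not B)) is rewritten as
	 (not (ior A B)) and (ior (not A) (not B)) as (not (and A B));
	 the operand of the NOT then offers a natural split point.  */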
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_NOT (GET_MODE (x),
			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
					      GET_MODE (x),
					      XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too great to record, the excess
   changes are not made, so the result is invalid.  The changes already
   made can still be undone.  undobuf.num_undo is incremented for such
   changes, so by testing that, the caller can tell whether the result
   is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */
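
/* An illustrative note (not part of the original comment): try_combine
   invokes this roughly as

       newpat = subst (PATTERN (i3), i2dest, i2src, 0, 0);

   substituting the value computed by a feeding insn for each use of its
   destination register in the insn being combined into.  */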

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referential rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
	  && XEXP (new, 0) == const0_rtx)
	return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
	{
	  rtx dest = SET_DEST (XVECEXP (x, 0, i));

	  if (GET_CODE (dest) != REG
	      && GET_CODE (dest) != CC0
	      && GET_CODE (dest) != PC)
	    {
	      new = subst (dest, from, to, 0, unique_copy);

	      /* If this substitution failed, this whole thing fails.  */
	      if (GET_CODE (new) == CLOBBER
		  && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
	    }
	}
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
	 or PC, so set up to skip this common case.  All other cases
	 where we want to suppress replacing something inside a
	 SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
	  && (GET_CODE (SET_DEST (x)) == REG
	      || GET_CODE (SET_DEST (x)) == CC0
	      || GET_CODE (SET_DEST (x)) == PC))
	fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
	 constant.  */
      if (fmt[0] == 'e')
	op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
	{
	  if (fmt[i] == 'E')
	    {
	      int j;
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		{
		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		    {
		      new = (unique_copy && n_occurrences
			     ? copy_rtx (to) : to);
		      n_occurrences++;
		    }
		  else
		    {
		      new = subst (XVECEXP (x, i, j), from, to, 0,
				   unique_copy);

		      /* If this substitution failed, this whole thing
			 fails.  */
		      if (GET_CODE (new) == CLOBBER
			  && XEXP (new, 0) == const0_rtx)
			return new;
		    }

		  SUBST (XVECEXP (x, i, j), new);
		}
	    }
	  else if (fmt[i] == 'e')
	    {
	      /* If this is a register being set, ignore it.  */
	      new = XEXP (x, i);
	      if (in_dest
		  && (code == SUBREG || code == STRICT_LOW_PART
		      || code == ZERO_EXTRACT)
		  && i == 0
		  && GET_CODE (new) == REG)
		;

	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
		{
		  /* In general, don't install a subreg involving two
		     modes not tieable.  It can worsen register
		     allocation, and can even make invalid reload
		     insns, since the reg inside may need to be copied
		     from in the outside mode, and that may be invalid
		     if it is an fp reg copied in integer mode.

		     We allow two exceptions to this: It is valid if
		     it is inside another SUBREG and the mode of that
		     SUBREG and the mode of the inside of TO is
		     tieable and it is valid if X is a SET that copies
		     FROM to CC0.  */

		  if (GET_CODE (to) == SUBREG
		      && ! MODES_TIEABLE_P (GET_MODE (to),
					    GET_MODE (SUBREG_REG (to)))
		      && ! (code == SUBREG
			    && MODES_TIEABLE_P (GET_MODE (x),
						GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		      )
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CANNOT_CHANGE_MODE_CLASS
		  if (code == SUBREG
		      && GET_CODE (to) == REG
		      && REGNO (to) < FIRST_PSEUDO_REGISTER
		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
						   GET_MODE (to),
						   GET_MODE (x)))
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		/* If we are in a SET_DEST, suppress most cases unless we
		   have gone inside a MEM, in which case we want to
		   simplify the address.  We assume here that things that
		   are actually part of the destination have their inner
		   parts in the first expression.  This is true for SUBREG,
		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
		   things aside from REG and MEM that should appear in a
		   SET_DEST.  */
		new = subst (XEXP (x, i), from, to,
			     (((in_dest
				&& (code == SUBREG || code == STRICT_LOW_PART
				    || code == ZERO_EXTRACT))
			       || code == SET)
			      && i == 0), unique_copy);

	      /* If we found that we will have to reject this combination,
		 indicate that by returning the CLOBBER ourselves, rather than
		 an expression containing it.  This will speed things up as
		 well as prevent accidents where two CLOBBERs are considered
		 to be equal, thus producing an incorrect simplification.  */

	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		return new;

	      if (GET_CODE (x) == SUBREG
		  && (GET_CODE (new) == CONST_INT
		      || GET_CODE (new) == CONST_DOUBLE))
		{
		  enum machine_mode mode = GET_MODE (x);

		  x = simplify_subreg (GET_MODE (x), new,
				       GET_MODE (SUBREG_REG (x)),
				       SUBREG_BYTE (x));
		  if (! x)
		    x = gen_rtx_CLOBBER (mode, const0_rtx);
		}
	      else if (GET_CODE (new) == CONST_INT
		       && GET_CODE (x) == ZERO_EXTEND)
		{
		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
						new, GET_MODE (XEXP (x, 0)));
		  if (! x)
		    abort ();
		}
	      else
		SUBST (XEXP (x, i), new);
	    }
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
   will be the last iteration, even if an expression with a code different
   from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int last,
		      int in_dest)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  rtx reversed;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (GET_RTX_CLASS (code) == 'c'
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the sign
     extension and the addition.  Note that this is not the same as the
     original code, but overflow is undefined for signed values.  Also note
     that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
         (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend A) C2) C3),
     where C3 is C1 * C2.

     We do this to simplify address expressions.  */

  if ((code == PLUS || code == MINUS || code == MULT)
      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
      && (temp = simplify_binary_operation (ASHIFTRT, mode,
					    XEXP (XEXP (XEXP (x, 0), 0), 1),
					    XEXP (XEXP (x, 0), 1))) != 0)
    {
      rtx new
	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
				INTVAL (XEXP (XEXP (x, 0), 1)));

      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
				  INTVAL (XEXP (XEXP (x, 0), 1)));

      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
    }
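
  /* A concrete instance (illustrative): with C4 == 24 in SImode,
	 (ashiftrt (plus (ashift A (const_int 24)) (const_int 33554432))
		   (const_int 24))
     becomes
	 (plus (ashiftrt (ashift A (const_int 24)) (const_int 24))
	       (const_int 2))
     since 33554432 >> 24 == 2.  */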

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
	|| GET_RTX_CLASS (code) == '<')
       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
		      == 'o')))
	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
			 == 'o')))))
      || (GET_RTX_CLASS (code) == '1'
	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
			 == 'o'))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
	  /* If everything is a comparison, what we have is highly unlikely
	     to be simpler, so don't use it.  */
	  && ! (GET_RTX_CLASS (code) == '<'
		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
	{
	  rtx cop1 = const0_rtx;
	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
	    return x;

	  /* Simplify the alternative arms; this may collapse the true and
	     false arms to store-flag values.  Be careful to use copy_rtx
	     here since true_rtx or false_rtx might share RTL with x as a
	     result of the if_then_else_cond call above.  */
	  true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
	  false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);

	  /* If true_rtx and false_rtx are not general_operands, an
	     if_then_else is unlikely to be simpler.  */
	  if (general_operand (true_rtx, VOIDmode)
	      && general_operand (false_rtx, VOIDmode))
	    {
	      enum rtx_code reversed;

	      /* Restarting if we generate a store-flag expression will cause
		 us to loop.  Just drop through in this case.  */

	      /* If the result values are STORE_FLAG_VALUE and zero, we can
		 just make the comparison operation.  */
	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
		x = gen_binary (cond_code, mode, cond, cop1);
	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
		       && ((reversed = reversed_comparison_code_parts
					(cond_code, cond, cop1, NULL))
		           != UNKNOWN))
		x = gen_binary (reversed, mode, cond, cop1);

	      /* Likewise, we can make the negate of a comparison operation
		 if the result values are - STORE_FLAG_VALUE and zero.  */
	      else if (GET_CODE (true_rtx) == CONST_INT
		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
		       && false_rtx == const0_rtx)
		x = simplify_gen_unary (NEG, mode,
					gen_binary (cond_code, mode, cond,
						    cop1),
					mode);
	      else if (GET_CODE (false_rtx) == CONST_INT
		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
		       && true_rtx == const0_rtx
		       && ((reversed = reversed_comparison_code_parts
					(cond_code, cond, cop1, NULL))
		           != UNKNOWN))
		x = simplify_gen_unary (NEG, mode,
					gen_binary (reversed, mode,
						    cond, cop1),
					mode);
	      else
		return gen_rtx_IF_THEN_ELSE (mode,
					     gen_binary (cond_code, VOIDmode,
							 cond, cop1),
					     true_rtx, false_rtx);

	      code = GET_CODE (x);
	      op0_mode = VOIDmode;
	    }
	}
    }
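
  /* For example (a sketch, assuming STORE_FLAG_VALUE == 1):
	 (neg (if_then_else (lt a b) (const_int 1) (const_int 0)))
     has arms that collapse to -1 and 0, so the whole expression becomes
	 (neg (lt a b)).  */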

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      if (op0_mode == VOIDmode)
	op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case '<':
      {
	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
	if (cmp_mode == VOIDmode)
	  {
	    cmp_mode = GET_MODE (XEXP (x, 1));
	    if (cmp_mode == VOIDmode)
	      cmp_mode = op0_mode;
	  }
	temp = simplify_relational_operation (code, cmp_mode,
					      XEXP (x, 0), XEXP (x, 1));
      }
#ifdef FLOAT_STORE_FLAG_VALUE
      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  if (temp == const0_rtx)
	    temp = CONST0_RTX (mode);
	  else
	    temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode),
						 mode);
	}
#endif
      break;
    case 'c':
    case '2':
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case 'b':
    case '3':
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
					 XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (e.g. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
	{
	  rtx other = XEXP (XEXP (x, 0), 0);
	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
	  rtx inner_op1 = XEXP (x, 1);
	  rtx inner;

	  /* Make sure we pass the constant operand if any as the second
	     one if this is a commutative operation.  */
	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
	    {
	      rtx tem = inner_op0;
	      inner_op0 = inner_op1;
	      inner_op1 = tem;
	    }
	  inner = simplify_binary_operation (code == MINUS ? PLUS
					     : code == DIV ? MULT
					     : code,
					     mode, inner_op0, inner_op1);

	  /* For commutative operations, try the other pair if that one
	     didn't simplify.  */
	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
	    {
	      other = XEXP (XEXP (x, 0), 1);
	      inner = simplify_binary_operation (code, mode,
						 XEXP (XEXP (x, 0), 0),
						 XEXP (x, 1));
	    }

	  if (inner)
	    return gen_binary (code, mode, other, inner);
	}
    }

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
	 address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
	op0_mode = GET_MODE (SUBREG_REG (x));

      /* simplify_subreg can't use gen_lowpart_for_combine.  */
      if (CONSTANT_P (SUBREG_REG (x))
	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
	     /* Don't call gen_lowpart_for_combine if the inner mode
		is VOIDmode and we cannot simplify it, as SUBREG without
		inner mode is invalid.  */
	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
	      || gen_lowpart_common (mode, SUBREG_REG (x))))
	return gen_lowpart_for_combine (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
	rtx temp;
	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
				SUBREG_BYTE (x));
	if (temp)
	  return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
	 of the address.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM
	  && (MEM_VOLATILE_P (SUBREG_REG (x))
	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
	return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
	 we might have been counting on using the fact that some bits were
	 zero.  We now do this in the SET.  */

      break;

    case NOT:
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && subreg_lowpart_p (XEXP (x, 0))
	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
	{
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));

	  x = gen_rtx_ROTATE (inner_mode,
			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
						  inner_mode),
			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
	  return gen_lowpart_for_combine (mode, x);
	}

      /* Apply De Morgan's laws to reduce the number of patterns for
	 machines with negating logical insns (and-not, nand, etc.).  If
	 result has only one NOT, put it first, since that is how the
	 patterns are coded.  */
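
      /* E.g. (illustrative): (not (ior A B)) becomes (and (not A) (not B)),
	 and (not (and A (not B))) becomes (ior (not A) B), which matches
	 and-not patterns directly.  */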

      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
	{
	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
	  enum machine_mode op_mode;

	  op_mode = GET_MODE (in1);
	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);

	  op_mode = GET_MODE (in2);
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);

	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
	    {
	      rtx tem = in2;
	      in2 = in1; in1 = tem;
	    }

	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
				 mode, in1, in2);
	}
      break;

    case NEG:
      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
	  && XEXP (XEXP (x, 0), 1) == const1_rtx
	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);

      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
	 replaced by (lshiftrt X C).  This will convert
	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
				     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
	 or a SUBREG of one since we'd be making the expression more
	 complex if it was just a register.  */
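
      /* Numerically (a sketch): in SImode with only bit 4 possibly nonzero
	 (I == 4), (neg TEMP) is tried as
	 (ashiftrt (ashift TEMP (const_int 27)) (const_int 27)); the single
	 bit is moved to the sign position and then smeared.  */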

      if (GET_CODE (temp) != REG
	  && ! (GET_CODE (temp) == SUBREG
		&& GET_CODE (SUBREG_REG (temp)) == REG)
	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
	{
	  rtx temp1 = simplify_shift_const
	    (NULL_RTX, ASHIFTRT, mode,
	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
				   GET_MODE_BITSIZE (mode) - 1 - i),
	     GET_MODE_BITSIZE (mode) - 1 - i);

	  /* If all we did was surround TEMP with the two shifts, we
	     haven't improved anything, so don't use it.  Otherwise,
	     we are better off with TEMP1.  */
	  if (GET_CODE (temp1) != ASHIFTRT
	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
	      || XEXP (XEXP (temp1, 0), 0) != temp)
	    return temp1;
	}
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
	 because we don't know the real bitsize of the partial
	 integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
	SUBST (XEXP (x, 0),
	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
			      GET_MODE_MASK (mode), NULL_RTX, 0));

      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
	return XEXP (XEXP (x, 0), 0);

      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
	 (OP:SI foo:SI) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
	   || GET_CODE (XEXP (x, 0)) == NEG)
	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
	 (truncate:SI X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
	  && subreg_lowpart_p (XEXP (x, 0)))
	return SUBREG_REG (XEXP (x, 0));

      /* If we know that the value is already truncated, we can
         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
         is nonzero for the corresponding modes.  But don't do this
         for an (LSHIFTRT (MULT ...)) since this will cause problems
         with the umulXi3_highpart patterns.  */
      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	     >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* A truncate of a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  This is like the previous test,
         but it works even if the comparison is done in a mode larger
         than HOST_BITS_PER_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* Similarly, a truncate of a register whose value is a
         comparison can be replaced with a subreg if STORE_FLAG_VALUE
         permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
	  && (temp = get_last_value (XEXP (x, 0)))
	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      break;

    case FLOAT_TRUNCATE:
      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
	return XEXP (XEXP (x, 0), 0);

      /* (float_truncate:SF (float_truncate:DF foo:XF))
         = (float_truncate:SF foo:XF).
	 This may eliminate double rounding, so it is unsafe.

         (float_truncate:SF (float_extend:XF foo:DF))
         = (float_truncate:SF foo:DF).

         (float_truncate:DF (float_extend:XF foo:SF))
         = (float_extend:DF foo:SF).  */
      if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
	   && flag_unsafe_math_optimizations)
	  || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
	return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
							    0)))
				   > GET_MODE_SIZE (mode)
				   ? FLOAT_TRUNCATE : FLOAT_EXTEND,
				   mode,
				   XEXP (XEXP (x, 0), 0), mode);

      /* (float_truncate (float x)) is (float x).  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT
	  && (flag_unsafe_math_optimizations
	      || ((unsigned) significand_size (GET_MODE (XEXP (x, 0)))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
		      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
					     GET_MODE (XEXP (XEXP (x, 0), 0)))))))
	return simplify_gen_unary (FLOAT, mode,
				   XEXP (XEXP (x, 0), 0),
				   GET_MODE (XEXP (XEXP (x, 0), 0)));

      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
	 (OP:SF foo:SF) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
	   || GET_CODE (XEXP (x, 0)) == NEG)
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
	 is (float_truncate:SF X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
	return SUBREG_REG (XEXP (x, 0));
      break;

    case FLOAT_EXTEND:
      /* (float_extend (float_extend x)) is (float_extend x);
	 (float_extend (float x)) is (float x), assuming that double
	 rounding can't happen.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
	  || (GET_CODE (XEXP (x, 0)) == FLOAT
	      && ((unsigned) significand_size (GET_MODE (XEXP (x, 0)))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
		      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
					     GET_MODE (XEXP (XEXP (x, 0), 0)))))))
	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
				   XEXP (XEXP (x, 0), 0),
				   GET_MODE (XEXP (XEXP (x, 0), 0)));

      break;

#ifdef HAVE_cc0
    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
	 using cc0, in which case we want to leave it as a COMPARE
	 so we can distinguish it from a register-register copy.  */
      if (XEXP (x, 1) == const0_rtx)
	return XEXP (x, 0);

      /* x - 0 is the same as x unless x's mode has signed zeros and
	 allows rounding towards -infinity.  Under those conditions,
	 0 - 0 is -0.  */
      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
	    && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
	return XEXP (x, 0);
      break;
#endif

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
	 returning the inner CONST since CONST can be shared with a
	 REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
	 can add in an offset.  find_split_point will split this address up
	 again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
	return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
	{
	  rtx in1, in2;

	  in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
	  in2 = XEXP (XEXP (x, 0), 1);
	  return gen_binary (MINUS, mode, XEXP (x, 1),
			     gen_binary (MULT, mode, in1, in2));
	}

      /* If we have (plus (plus A const) B), associate it so that CONST is
	 outermost.  That's because that's the way indexed addresses are
	 supposed to appear.  This code used to check many more cases, but
	 they are now checked elsewhere.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
	return gen_binary (PLUS, mode,
			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
				       XEXP (x, 1)),
			   XEXP (XEXP (x, 0), 1));

      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
	 bit-field and can be replaced by either a sign_extend or a
	 sign_extract.  The `and' may be a zero_extend and the two
	 <c>, -<c> constants may be reversed.  */
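      /* Worked example (illustrative): in SImode, for an 8-bit field,
	     (plus (xor (and X (const_int 255)) (const_int 128))
		   (const_int -128))
	 has i + 1 == 8 and becomes
	     (ashiftrt (ashift X (const_int 24)) (const_int 24)),
	 the sign extension of the low byte of X.  */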
      if (GET_CODE (XEXP (x, 0)) == XOR
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
		      == (unsigned int) i + 1))))
	return simplify_shift_const
	  (NULL_RTX, ASHIFTRT, mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
				 XEXP (XEXP (XEXP (x, 0), 0), 0),
				 GET_MODE_BITSIZE (mode) - (i + 1)),
	   GET_MODE_BITSIZE (mode) - (i + 1));

      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
	 is 1.  This produces better code than the alternative immediately
	 below.  */
      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
					      XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 0), 1))))
	return
	  simplify_gen_unary (NEG, mode, reversed, mode);

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
	 the bitsize of the mode - 1.  This allows simplification of
	 "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
	  && GET_CODE (XEXP (x, 0)) != REG
	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
	  && nonzero_bits (XEXP (x, 0), mode) == 1)
	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
				 GET_MODE_BITSIZE (mode) - 1),
	   GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
	 the addition into an IOR.  This will often be further simplified,
	 for example in cases like ((a & 1) + (a & 2)), which can
	 become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode)
	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
	{
	  /* Try to simplify the expression further.  */
	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
	  temp = combine_simplify_rtx (tor, mode, last, in_dest);

	  /* If we could, great.  If not, do not go ahead with the IOR
	     replacement, since PLUS appears in many special purpose
	     address arithmetic instructions.  */
	  if (GET_CODE (temp) != CLOBBER && temp != tor)
	    return temp;
	}
      break;

    case MINUS:
      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
	 by reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == 1
	  && XEXP (x, 0) == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
					      XEXP (XEXP (x, 1), 0),
					      XEXP (XEXP (x, 1), 1))))
	return reversed;

      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
	 (and <foo> (const_int pow2-1))  */
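      /* E.g. (illustrative): (minus X (and X (const_int -8))) becomes
	 (and X (const_int 7)), by the bitwise identity
	 X - (X & -8) == X & 7.  */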
      if (GET_CODE (XEXP (x, 1)) == AND
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);

      /* Canonicalize (minus A (mult (neg B) C)) to
	 (plus (mult B C) A).  */
      if (GET_CODE (XEXP (x, 1)) == MULT
	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
	{
	  rtx in1, in2;

	  in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
	  in2 = XEXP (XEXP (x, 1), 1);
	  return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2),
			     XEXP (x, 0));
	}

      /* Canonicalize (minus (neg A) (mult B C)) to
	 (minus (mult (neg B) C) A).  */
      if (GET_CODE (XEXP (x, 1)) == MULT
	  && GET_CODE (XEXP (x, 0)) == NEG)
	{
	  rtx in1, in2;

	  in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
	  in2 = XEXP (XEXP (x, 1), 1);
	  return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2),
			     XEXP (XEXP (x, 0), 0));
	}

      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
	 integers.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
	return gen_binary (MINUS, mode,
			   gen_binary (MINUS, mode, XEXP (x, 0),
				       XEXP (XEXP (x, 1), 0)),
			   XEXP (XEXP (x, 1), 1));
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  This
	 occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
	{
	  x = apply_distributive_law
	    (gen_binary (PLUS, mode,
			 gen_binary (MULT, mode,
				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
			 gen_binary (MULT, mode,
				     XEXP (XEXP (x, 0), 1),
				     copy_rtx (XEXP (x, 1)))));

	  if (GET_CODE (x) != MULT)
	    return x;
	}
      /* Try to simplify a * (b / c) as (a * b) / c.  */
      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
	  && GET_CODE (XEXP (x, 0)) == DIV)
	{
	  rtx tem = simplify_binary_operation (MULT, mode,
					       XEXP (XEXP (x, 0), 0),
					       XEXP (x, 1));
	  if (tem)
	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
	}
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
	 its first operand is a shift.  */
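      /* Shift-merging example (illustrative):
	 (udiv (lshiftrt X (const_int 2)) (const_int 4)) is treated as a
	 logical right shift by 2, which merges to
	 (lshiftrt X (const_int 4)).  */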
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ROTATE
	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;
4325
4326    case EQ:  case NE:
4327    case GT:  case GTU:  case GE:  case GEU:
4328    case LT:  case LTU:  case LE:  case LEU:
4329    case UNEQ:  case LTGT:
4330    case UNGT:  case UNGE:
4331    case UNLT:  case UNLE:
4332    case UNORDERED: case ORDERED:
4333      /* If the first operand is a condition code, we can't do anything
4334	 with it.  */
4335      if (GET_CODE (XEXP (x, 0)) == COMPARE
4336	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4337	      && ! CC0_P (XEXP (x, 0))))
4338	{
4339	  rtx op0 = XEXP (x, 0);
4340	  rtx op1 = XEXP (x, 1);
4341	  enum rtx_code new_code;
4342
4343	  if (GET_CODE (op0) == COMPARE)
4344	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4345
4346	  /* Simplify our comparison, if possible.  */
4347	  new_code = simplify_comparison (code, &op0, &op1);
4348
4349	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4350	     if only the low-order bit is possibly nonzero in X (such as when
4351	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4352	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4353	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4354	     (plus X 1).
4355
4356	     Remove any ZERO_EXTRACT we made when thinking this was a
4357	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4358	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4359	     the call to make_compound_operation in the SET case.  */
4360
4361	  if (STORE_FLAG_VALUE == 1
4362	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4363	      && op1 == const0_rtx
4364	      && mode == GET_MODE (op0)
4365	      && nonzero_bits (op0, mode) == 1)
4366	    return gen_lowpart_for_combine (mode,
4367					    expand_compound_operation (op0));
4368
4369	  else if (STORE_FLAG_VALUE == 1
4370		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4371		   && op1 == const0_rtx
4372		   && mode == GET_MODE (op0)
4373		   && (num_sign_bit_copies (op0, mode)
4374		       == GET_MODE_BITSIZE (mode)))
4375	    {
4376	      op0 = expand_compound_operation (op0);
4377	      return simplify_gen_unary (NEG, mode,
4378					 gen_lowpart_for_combine (mode, op0),
4379					 mode);
4380	    }
4381
4382	  else if (STORE_FLAG_VALUE == 1
4383		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4384		   && op1 == const0_rtx
4385		   && mode == GET_MODE (op0)
4386		   && nonzero_bits (op0, mode) == 1)
4387	    {
4388	      op0 = expand_compound_operation (op0);
4389	      return gen_binary (XOR, mode,
4390				 gen_lowpart_for_combine (mode, op0),
4391				 const1_rtx);
4392	    }
4393
4394	  else if (STORE_FLAG_VALUE == 1
4395		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4396		   && op1 == const0_rtx
4397		   && mode == GET_MODE (op0)
4398		   && (num_sign_bit_copies (op0, mode)
4399		       == GET_MODE_BITSIZE (mode)))
4400	    {
4401	      op0 = expand_compound_operation (op0);
4402	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4403	    }
4404
4405	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4406	     those above.  */
4407	  if (STORE_FLAG_VALUE == -1
4408	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4409	      && op1 == const0_rtx
4410	      && (num_sign_bit_copies (op0, mode)
4411		  == GET_MODE_BITSIZE (mode)))
4412	    return gen_lowpart_for_combine (mode,
4413					    expand_compound_operation (op0));
4414
4415	  else if (STORE_FLAG_VALUE == -1
4416		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4417		   && op1 == const0_rtx
4418		   && mode == GET_MODE (op0)
4419		   && nonzero_bits (op0, mode) == 1)
4420	    {
4421	      op0 = expand_compound_operation (op0);
4422	      return simplify_gen_unary (NEG, mode,
4423					 gen_lowpart_for_combine (mode, op0),
4424					 mode);
4425	    }
4426
4427	  else if (STORE_FLAG_VALUE == -1
4428		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4429		   && op1 == const0_rtx
4430		   && mode == GET_MODE (op0)
4431		   && (num_sign_bit_copies (op0, mode)
4432		       == GET_MODE_BITSIZE (mode)))
4433	    {
4434	      op0 = expand_compound_operation (op0);
4435	      return simplify_gen_unary (NOT, mode,
4436					 gen_lowpart_for_combine (mode, op0),
4437					 mode);
4438	    }
4439
4440	  /* If X is 0/1, (eq X 0) is X-1.  */
4441	  else if (STORE_FLAG_VALUE == -1
4442		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4443		   && op1 == const0_rtx
4444		   && mode == GET_MODE (op0)
4445		   && nonzero_bits (op0, mode) == 1)
4446	    {
4447	      op0 = expand_compound_operation (op0);
4448	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4449	    }
4450
4451	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4452	     one bit that might be nonzero, we can convert (ne x 0) to
4453	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4454	     AND with STORE_FLAG_VALUE when we are done, since we are only
4455	     going to test the sign bit.  */
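	  /* E.g., in SImode with STORE_FLAG_VALUE == 0x80000000 (only the
	     sign bit set), if just bit 3 of X can be nonzero, (ne X 0)
	     becomes (ashift X (const_int 28)), which moves that bit into
	     the sign bit.  */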
4456	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4457	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4458	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4459		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4460	      && op1 == const0_rtx
4461	      && mode == GET_MODE (op0)
4462	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4463	    {
4464	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4465					expand_compound_operation (op0),
4466					GET_MODE_BITSIZE (mode) - 1 - i);
4467	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4468		return XEXP (x, 0);
4469	      else
4470		return x;
4471	    }
4472
4473	  /* If the code changed, return a whole new comparison.  */
4474	  if (new_code != code)
4475	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4476
4477	  /* Otherwise, keep this operation, but maybe change its operands.
4478	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4479	  SUBST (XEXP (x, 0), op0);
4480	  SUBST (XEXP (x, 1), op1);
4481	}
4482      break;
4483
4484    case IF_THEN_ELSE:
4485      return simplify_if_then_else (x);
4486
4487    case ZERO_EXTRACT:
4488    case SIGN_EXTRACT:
4489    case ZERO_EXTEND:
4490    case SIGN_EXTEND:
4491      /* If we are processing SET_DEST, we are done.  */
4492      if (in_dest)
4493	return x;
4494
4495      return expand_compound_operation (x);
4496
4497    case SET:
4498      return simplify_set (x);
4499
4500    case AND:
4501    case IOR:
4502    case XOR:
4503      return simplify_logical (x, last);
4504
4505    case ABS:
4506      /* (abs (neg <foo>)) -> (abs <foo>) */
4507      if (GET_CODE (XEXP (x, 0)) == NEG)
4508	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4509
4510      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4511         do nothing.  */
4512      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4513	break;
4514
4515      /* If operand is something known to be positive, ignore the ABS.  */
4516      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4517	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4518	       <= HOST_BITS_PER_WIDE_INT)
4519	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4520		   & ((HOST_WIDE_INT) 1
4521		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4522		  == 0)))
4523	return XEXP (x, 0);
4524
4525      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4526      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4527	return gen_rtx_NEG (mode, XEXP (x, 0));
4528
4529      break;
4530
4531    case FFS:
4532      /* (ffs (*_extend <X>)) = (ffs <X>) */
4533      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4534	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4535	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4536      break;
4537
4538    case POPCOUNT:
4539    case PARITY:
4540      /* (pop* (zero_extend <X>)) = (pop* <X>) */
4541      if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4542	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4543      break;
4544
4545    case FLOAT:
4546      /* (float (sign_extend <X>)) = (float <X>).  */
4547      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4548	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4549      break;
4550
4551    case ASHIFT:
4552    case LSHIFTRT:
4553    case ASHIFTRT:
4554    case ROTATE:
4555    case ROTATERT:
4556      /* If this is a shift by a constant amount, simplify it.  */
4557      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4558	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4559				     INTVAL (XEXP (x, 1)));
4560
4561      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4562	SUBST (XEXP (x, 1),
4563	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4564			      ((HOST_WIDE_INT) 1
4565			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4566			      - 1,
4567			      NULL_RTX, 0));
4568      break;
4569
4570    case VEC_SELECT:
4571      {
4572	rtx op0 = XEXP (x, 0);
4573	rtx op1 = XEXP (x, 1);
4574	int len;
4575
4576	if (GET_CODE (op1) != PARALLEL)
4577	  abort ();
4578	len = XVECLEN (op1, 0);
4579	if (len == 1
4580	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4581	    && GET_CODE (op0) == VEC_CONCAT)
4582	  {
4583	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4584
4585	    /* Try to find the element in the VEC_CONCAT.  */
4586	    for (;;)
4587	      {
4588		if (GET_MODE (op0) == GET_MODE (x))
4589		  return op0;
4590		if (GET_CODE (op0) == VEC_CONCAT)
4591		  {
4592		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
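		    /* The element lies in the first operand exactly when
		       OFFSET falls within that operand; otherwise step
		       past it into the second operand.  */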
4593		    if (offset < op0_size)
4594		      op0 = XEXP (op0, 0);
4595		    else
4596		      {
4597			offset -= op0_size;
4598			op0 = XEXP (op0, 1);
4599		      }
4600		  }
4601		else
4602		  break;
4603	      }
4604	  }
4605      }
4606
4607      break;
4608
4609    default:
4610      break;
4611    }
4612
4613  return x;
4614}
4615
4616/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4617
4618static rtx
4619simplify_if_then_else (rtx x)
4620{
4621  enum machine_mode mode = GET_MODE (x);
4622  rtx cond = XEXP (x, 0);
4623  rtx true_rtx = XEXP (x, 1);
4624  rtx false_rtx = XEXP (x, 2);
4625  enum rtx_code true_code = GET_CODE (cond);
4626  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4627  rtx temp;
4628  int i;
4629  enum rtx_code false_code;
4630  rtx reversed;
4631
4632  /* Simplify storing of the truth value.  */
4633  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4634    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4635
4636  /* Also when the truth value has to be reversed.  */
4637  if (comparison_p
4638      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4639      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4640					  XEXP (cond, 1))))
4641    return reversed;
4642
4643  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4644     in it is being compared against certain values.  Get the true and false
4645     comparisons and see if that says anything about the value of each arm.  */
4646
4647  if (comparison_p
4648      && ((false_code = combine_reversed_comparison_code (cond))
4649	  != UNKNOWN)
4650      && GET_CODE (XEXP (cond, 0)) == REG)
4651    {
4652      HOST_WIDE_INT nzb;
4653      rtx from = XEXP (cond, 0);
4654      rtx true_val = XEXP (cond, 1);
4655      rtx false_val = true_val;
4656      int swapped = 0;
4657
4658      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4659
4660      if (false_code == EQ)
4661	{
4662	  swapped = 1, true_code = EQ, false_code = NE;
4663	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4664	}
4665
4666      /* If we are comparing against zero and the expression being tested has
4667	 only a single bit that might be nonzero, that is its value when it is
4668	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4669
4670      if (true_code == EQ && true_val == const0_rtx
4671	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4672	false_code = EQ, false_val = GEN_INT (nzb);
4673      else if (true_code == EQ && true_val == const0_rtx
4674	       && (num_sign_bit_copies (from, GET_MODE (from))
4675		   == GET_MODE_BITSIZE (GET_MODE (from))))
4676	false_code = EQ, false_val = constm1_rtx;
4677
4678      /* Now simplify an arm if we know the value of the register in the
4679	 branch and it is used in the arm.  Be careful due to the potential
4680	 of locally-shared RTL.  */
4681
4682      if (reg_mentioned_p (from, true_rtx))
4683	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4684				      from, true_val),
4685			  pc_rtx, pc_rtx, 0, 0);
4686      if (reg_mentioned_p (from, false_rtx))
4687	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4688				       from, false_val),
4689			   pc_rtx, pc_rtx, 0, 0);
4690
4691      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4692      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4693
4694      true_rtx = XEXP (x, 1);
4695      false_rtx = XEXP (x, 2);
4696      true_code = GET_CODE (cond);
4697    }
4698
4699  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4700     reversed, do so to avoid needing two sets of patterns for
4701     subtract-and-branch insns.  Similarly if we have a constant in the true
4702     arm, if the false arm is the same as the first operand of the comparison,
4703     or if the false arm is more complicated than the true arm.  */
4704
4705  if (comparison_p
4706      && combine_reversed_comparison_code (cond) != UNKNOWN
4707      && (true_rtx == pc_rtx
4708	  || (CONSTANT_P (true_rtx)
4709	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4710	  || true_rtx == const0_rtx
4711	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4712	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4713	  || (GET_CODE (true_rtx) == SUBREG
4714	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4715	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4716	  || reg_mentioned_p (true_rtx, false_rtx)
4717	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4718    {
4719      true_code = reversed_comparison_code (cond, NULL);
4720      SUBST (XEXP (x, 0),
4721	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4722				  XEXP (cond, 1)));
4723
4724      SUBST (XEXP (x, 1), false_rtx);
4725      SUBST (XEXP (x, 2), true_rtx);
4726
4727      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4728      cond = XEXP (x, 0);
4729
4730      /* It is possible that the conditional has been simplified out.  */
4731      true_code = GET_CODE (cond);
4732      comparison_p = GET_RTX_CLASS (true_code) == '<';
4733    }
4734
4735  /* If the two arms are identical, we don't need the comparison.  */
4736
4737  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4738    return true_rtx;
4739
4740  /* Convert a == b ? b : a to "a".  */
4741  if (true_code == EQ && ! side_effects_p (cond)
4742      && !HONOR_NANS (mode)
4743      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4744      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4745    return false_rtx;
4746  else if (true_code == NE && ! side_effects_p (cond)
4747	   && !HONOR_NANS (mode)
4748	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4749	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4750    return true_rtx;
4751
4752  /* Look for cases where we have (abs x) or (neg (abs X)).  */
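  /* E.g., (if_then_else (ge X 0) X (neg X)) is (abs X), and
     (if_then_else (lt X 0) X (neg X)) is (neg (abs X)).  */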
4753
4754  if (GET_MODE_CLASS (mode) == MODE_INT
4755      && GET_CODE (false_rtx) == NEG
4756      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4757      && comparison_p
4758      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4759      && ! side_effects_p (true_rtx))
4760    switch (true_code)
4761      {
4762      case GT:
4763      case GE:
4764	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4765      case LT:
4766      case LE:
4767	return
4768	  simplify_gen_unary (NEG, mode,
4769			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4770			      mode);
4771      default:
4772	break;
4773      }
4774
4775  /* Look for MIN or MAX.  */
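  /* E.g., (if_then_else (ge A B) A B) becomes (smax A B) and
     (if_then_else (ltu A B) A B) becomes (umin A B).  */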
4776
4777  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4778      && comparison_p
4779      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4780      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4781      && ! side_effects_p (cond))
4782    switch (true_code)
4783      {
4784      case GE:
4785      case GT:
4786	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4787      case LE:
4788      case LT:
4789	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4790      case GEU:
4791      case GTU:
4792	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4793      case LEU:
4794      case LTU:
4795	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4796      default:
4797	break;
4798      }
4799
4800  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4801     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4802     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4803     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4804     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4805     neither 1 or -1, but it isn't worth checking for.  */
4806     neither 1 nor -1, but it isn't worth checking for.  */
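  /* For example, with STORE_FLAG_VALUE == 1,
     (if_then_else (ne A B) (plus Z (const_int 4)) Z) can become
     (plus Z (mult (ne A B) (const_int 4))), since the MULT term
     is 4 when the condition holds and 0 when it does not.  */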
4807  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4808      && comparison_p
4809      && GET_MODE_CLASS (mode) == MODE_INT
4810      && ! side_effects_p (x))
4811    {
4812      rtx t = make_compound_operation (true_rtx, SET);
4813      rtx f = make_compound_operation (false_rtx, SET);
4814      rtx cond_op0 = XEXP (cond, 0);
4815      rtx cond_op1 = XEXP (cond, 1);
4816      enum rtx_code op = NIL, extend_op = NIL;
4817      enum machine_mode m = mode;
4818      rtx z = 0, c1 = NULL_RTX;
4819
4820      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4821	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4822	   || GET_CODE (t) == ASHIFT
4823	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4824	  && rtx_equal_p (XEXP (t, 0), f))
4825	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4826
4827      /* If an identity-zero op is commutative, check whether there
4828	 would be a match if we swapped the operands.  */
4829      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4830		|| GET_CODE (t) == XOR)
4831	       && rtx_equal_p (XEXP (t, 1), f))
4832	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4833      else if (GET_CODE (t) == SIGN_EXTEND
4834	       && (GET_CODE (XEXP (t, 0)) == PLUS
4835		   || GET_CODE (XEXP (t, 0)) == MINUS
4836		   || GET_CODE (XEXP (t, 0)) == IOR
4837		   || GET_CODE (XEXP (t, 0)) == XOR
4838		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4839		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4840		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4841	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4842	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4843	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4844	       && (num_sign_bit_copies (f, GET_MODE (f))
4845		   > (unsigned int)
4846		     (GET_MODE_BITSIZE (mode)
4847		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4848	{
4849	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4850	  extend_op = SIGN_EXTEND;
4851	  m = GET_MODE (XEXP (t, 0));
4852	}
4853      else if (GET_CODE (t) == SIGN_EXTEND
4854	       && (GET_CODE (XEXP (t, 0)) == PLUS
4855		   || GET_CODE (XEXP (t, 0)) == IOR
4856		   || GET_CODE (XEXP (t, 0)) == XOR)
4857	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4858	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4859	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4860	       && (num_sign_bit_copies (f, GET_MODE (f))
4861		   > (unsigned int)
4862		     (GET_MODE_BITSIZE (mode)
4863		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4864	{
4865	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4866	  extend_op = SIGN_EXTEND;
4867	  m = GET_MODE (XEXP (t, 0));
4868	}
4869      else if (GET_CODE (t) == ZERO_EXTEND
4870	       && (GET_CODE (XEXP (t, 0)) == PLUS
4871		   || GET_CODE (XEXP (t, 0)) == MINUS
4872		   || GET_CODE (XEXP (t, 0)) == IOR
4873		   || GET_CODE (XEXP (t, 0)) == XOR
4874		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4875		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4876		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4877	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4878	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4879	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4880	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4881	       && ((nonzero_bits (f, GET_MODE (f))
4882		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4883		   == 0))
4884	{
4885	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4886	  extend_op = ZERO_EXTEND;
4887	  m = GET_MODE (XEXP (t, 0));
4888	}
4889      else if (GET_CODE (t) == ZERO_EXTEND
4890	       && (GET_CODE (XEXP (t, 0)) == PLUS
4891		   || GET_CODE (XEXP (t, 0)) == IOR
4892		   || GET_CODE (XEXP (t, 0)) == XOR)
4893	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4894	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4895	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4896	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4897	       && ((nonzero_bits (f, GET_MODE (f))
4898		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4899		   == 0))
4900	{
4901	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4902	  extend_op = ZERO_EXTEND;
4903	  m = GET_MODE (XEXP (t, 0));
4904	}
4905
4906      if (z)
4907	{
4908	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4909			pc_rtx, pc_rtx, 0, 0);
4910	  temp = gen_binary (MULT, m, temp,
4911			     gen_binary (MULT, m, c1, const_true_rtx));
4912	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4913	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4914
4915	  if (extend_op != NIL)
4916	    temp = simplify_gen_unary (extend_op, mode, temp, m);
4917
4918	  return temp;
4919	}
4920    }
4921
4922  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4923     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4924     negation of a single bit, we can convert this operation to a shift.  We
4925     can actually do this more generally, but it doesn't seem worth it.  */
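  /* E.g., if A is known to be 0 or 1, (if_then_else (ne A 0) 8 0)
     is (ashift A 3); if A is known to be 0 or -1,
     (if_then_else (ne A 0) -8 0) is also (ashift A 3), since
     -1 << 3 == -8.  */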
4926
4927  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4928      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4929      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4930	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4931	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4932	       == GET_MODE_BITSIZE (mode))
4933	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4934    return
4935      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4936			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4937
4938  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
4939  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4940      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4941      && GET_MODE (XEXP (cond, 0)) == mode
4942      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
4943	  == nonzero_bits (XEXP (cond, 0), mode)
4944      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
4945    return XEXP (cond, 0);
4946
4947  return x;
4948}
4949
4950/* Simplify X, a SET expression.  Return the new expression.  */
4951
4952static rtx
4953simplify_set (rtx x)
4954{
4955  rtx src = SET_SRC (x);
4956  rtx dest = SET_DEST (x);
4957  enum machine_mode mode
4958    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4959  rtx other_insn;
4960  rtx *cc_use;
4961
4962  /* (set (pc) (return)) gets written as (return).  */
4963  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4964    return src;
4965
4966  /* Now that we know for sure which bits of SRC we are using, see if we can
4967     simplify the expression for the object knowing that we only need the
4968     low-order bits.  */
4969
4970  if (GET_MODE_CLASS (mode) == MODE_INT
4971      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4972    {
4973      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4974      SUBST (SET_SRC (x), src);
4975    }
4976
4977  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4978     the comparison result and try to simplify it unless we already have used
4979     undobuf.other_insn.  */
4980  if ((GET_MODE_CLASS (mode) == MODE_CC
4981       || GET_CODE (src) == COMPARE
4982       || CC0_P (dest))
4983      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4984      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4985      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4986      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4987    {
4988      enum rtx_code old_code = GET_CODE (*cc_use);
4989      enum rtx_code new_code;
4990      rtx op0, op1, tmp;
4991      int other_changed = 0;
4992      enum machine_mode compare_mode = GET_MODE (dest);
4993      enum machine_mode tmp_mode;
4994
4995      if (GET_CODE (src) == COMPARE)
4996	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4997      else
4998	op0 = src, op1 = const0_rtx;
4999
5000      /* Check whether the comparison is known at compile time.  */
5001      if (GET_MODE (op0) != VOIDmode)
5002	tmp_mode = GET_MODE (op0);
5003      else if (GET_MODE (op1) != VOIDmode)
5004	tmp_mode = GET_MODE (op1);
5005      else
5006	tmp_mode = compare_mode;
5007      tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1);
5008      if (tmp != NULL_RTX)
5009	{
5010	  rtx pat = PATTERN (other_insn);
5011	  undobuf.other_insn = other_insn;
5012	  SUBST (*cc_use, tmp);
5013
5014	  /* Attempt to simplify CC user.  */
5015	  if (GET_CODE (pat) == SET)
5016	    {
5017	      rtx new = simplify_rtx (SET_SRC (pat));
5018	      if (new != NULL_RTX)
5019		SUBST (SET_SRC (pat), new);
5020	    }
5021
5022	  /* Convert X into a no-op move.  */
5023	  SUBST (SET_DEST (x), pc_rtx);
5024	  SUBST (SET_SRC (x), pc_rtx);
5025	  return x;
5026	}
5027
5028      /* Simplify our comparison, if possible.  */
5029      new_code = simplify_comparison (old_code, &op0, &op1);
5030
5031#ifdef SELECT_CC_MODE
5032      /* If this machine has CC modes other than CCmode, check to see if we
5033	 need to use a different CC mode here.  */
5034      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5035
5036#ifndef HAVE_cc0
5037      /* If the mode changed, we have to change SET_DEST, the mode in the
5038	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5039	 a hard register, just build new versions with the proper mode.  If it
5040	 is a pseudo, we lose unless this is the only time we set the pseudo, in
5041	 which case we can safely change its mode.  */
5042      if (compare_mode != GET_MODE (dest))
5043	{
5044	  unsigned int regno = REGNO (dest);
5045	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
5046
5047	  if (regno < FIRST_PSEUDO_REGISTER
5048	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5049	    {
5050	      if (regno >= FIRST_PSEUDO_REGISTER)
5051		SUBST (regno_reg_rtx[regno], new_dest);
5052
5053	      SUBST (SET_DEST (x), new_dest);
5054	      SUBST (XEXP (*cc_use, 0), new_dest);
5055	      other_changed = 1;
5056
5057	      dest = new_dest;
5058	    }
5059	}
5060#endif  /* cc0 */
5061#endif  /* SELECT_CC_MODE */
5062
5063      /* If the code changed, we have to build a new comparison in
5064	 undobuf.other_insn.  */
5065      if (new_code != old_code)
5066	{
5067	  int other_changed_previously = other_changed;
5068	  unsigned HOST_WIDE_INT mask;
5069
5070	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5071					  dest, const0_rtx));
5072	  other_changed = 1;
5073
5074	  /* If the only change we made was to change an EQ into an NE or
5075	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5076	     is zero, check if changing the user of the condition code will
5077	     produce a valid insn.  If it won't, we can keep the original code
5078	     in that insn by surrounding our operation with an XOR.  */
5079
5080	  if (((old_code == NE && new_code == EQ)
5081	       || (old_code == EQ && new_code == NE))
5082	      && ! other_changed_previously && op1 == const0_rtx
5083	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5084	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5085	    {
5086	      rtx pat = PATTERN (other_insn), note = 0;
5087
5088	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5089		   && ! check_asm_operands (pat)))
5090		{
5091		  PUT_CODE (*cc_use, old_code);
5092		  other_changed = 0;
5093
5094		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5095		}
5096	    }
5097	}
5098
5099      if (other_changed)
5100	undobuf.other_insn = other_insn;
5101
5102#ifdef HAVE_cc0
5103      /* If we are now comparing against zero, change our source if
5104	 needed.  If we do not use cc0, we always have a COMPARE.  */
5105      if (op1 == const0_rtx && dest == cc0_rtx)
5106	{
5107	  SUBST (SET_SRC (x), op0);
5108	  src = op0;
5109	}
5110      else
5111#endif
5112
5113      /* Otherwise, if we didn't previously have a COMPARE in the
5114	 correct mode, we need one.  */
5115      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5116	{
5117	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5118	  src = SET_SRC (x);
5119	}
5120      else
5121	{
5122	  /* Otherwise, update the COMPARE if needed.  */
5123	  SUBST (XEXP (src, 0), op0);
5124	  SUBST (XEXP (src, 1), op1);
5125	}
5126    }
5127  else
5128    {
5129      /* Get SET_SRC in a form where we have placed back any
5130	 compound expressions.  Then do the checks below.  */
5131      src = make_compound_operation (src, SET);
5132      SUBST (SET_SRC (x), src);
5133    }
5134
5135  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5136     and X being a REG or (subreg (reg)), we may be able to convert this to
5137     (set (subreg:m2 x) (op)).
5138
5139     We can always do this if M1 is narrower than M2 because that means that
5140     we only care about the low bits of the result.
5141
5142     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5143     perform a narrower operation than requested since the high-order bits will
5144     be undefined.  On machines where it is defined, this transformation is safe
5145     as long as M1 and M2 have the same number of words.  */
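  /* E.g., (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R 0) (plus:SI A B)), since only the low byte
     of the PLUS result is needed.  */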
5146
5147  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5148      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5149      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5150	   / UNITS_PER_WORD)
5151	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5152	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5153#ifndef WORD_REGISTER_OPERATIONS
5154      && (GET_MODE_SIZE (GET_MODE (src))
5155        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5156#endif
5157#ifdef CANNOT_CHANGE_MODE_CLASS
5158      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5159	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5160					 GET_MODE (SUBREG_REG (src)),
5161					 GET_MODE (src)))
5162#endif
5163      && (GET_CODE (dest) == REG
5164	  || (GET_CODE (dest) == SUBREG
5165	      && GET_CODE (SUBREG_REG (dest)) == REG)))
5166    {
5167      SUBST (SET_DEST (x),
5168	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5169				      dest));
5170      SUBST (SET_SRC (x), SUBREG_REG (src));
5171
5172      src = SET_SRC (x), dest = SET_DEST (x);
5173    }
5174
5175#ifdef HAVE_cc0
5176  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5177     in SRC.  */
5178  if (dest == cc0_rtx
5179      && GET_CODE (src) == SUBREG
5180      && subreg_lowpart_p (src)
5181      && (GET_MODE_BITSIZE (GET_MODE (src))
5182	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5183    {
5184      rtx inner = SUBREG_REG (src);
5185      enum machine_mode inner_mode = GET_MODE (inner);
5186
5187      /* Here we make sure that we don't have a sign bit on.  */
5188      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5189	  && (nonzero_bits (inner, inner_mode)
5190	      < ((unsigned HOST_WIDE_INT) 1
5191		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5192	{
5193	  SUBST (SET_SRC (x), inner);
5194	  src = SET_SRC (x);
5195	}
5196    }
5197#endif
5198
5199#ifdef LOAD_EXTEND_OP
5200  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5201     would require a paradoxical subreg.  Replace the subreg with a
5202     zero_extend to avoid the reload that would otherwise be required.  */
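  /* E.g., on a machine where QImode loads zero-extend,
     (subreg:SI (mem:QI BAR) 0) becomes (zero_extend:SI (mem:QI BAR)).  */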
5203
5204  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5205      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5206      && SUBREG_BYTE (src) == 0
5207      && (GET_MODE_SIZE (GET_MODE (src))
5208	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5209      && GET_CODE (SUBREG_REG (src)) == MEM)
5210    {
5211      SUBST (SET_SRC (x),
5212	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5213		      GET_MODE (src), SUBREG_REG (src)));
5214
5215      src = SET_SRC (x);
5216    }
5217#endif
5218
5219  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5220     are comparing an item known to be 0 or -1 against 0, use a logical
5221     operation instead. Check for one of the arms being an IOR of the other
5222     arm with some value.  We compute three terms to be IOR'ed together.  In
5223     practice, at most two will be nonzero.  Then we do the IOR's.  */
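  /* E.g., if A is known to be 0 or -1, (if_then_else (ne A 0) B C)
     becomes (ior (and A B) (and (not A) C)).  */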
5224
5225  if (GET_CODE (dest) != PC
5226      && GET_CODE (src) == IF_THEN_ELSE
5227      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5228      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5229      && XEXP (XEXP (src, 0), 1) == const0_rtx
5230      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5231#ifdef HAVE_conditional_move
5232      && ! can_conditionally_move_p (GET_MODE (src))
5233#endif
5234      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5235			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5236	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5237      && ! side_effects_p (src))
5238    {
5239      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5240		      ? XEXP (src, 1) : XEXP (src, 2));
5241      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5242		       ? XEXP (src, 2) : XEXP (src, 1));
5243      rtx term1 = const0_rtx, term2, term3;
5244
5245      if (GET_CODE (true_rtx) == IOR
5246	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5247	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5248      else if (GET_CODE (true_rtx) == IOR
5249	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5250	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5251      else if (GET_CODE (false_rtx) == IOR
5252	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5253	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5254      else if (GET_CODE (false_rtx) == IOR
5255	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5256	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5257
5258      term2 = gen_binary (AND, GET_MODE (src),
5259			  XEXP (XEXP (src, 0), 0), true_rtx);
5260      term3 = gen_binary (AND, GET_MODE (src),
5261			  simplify_gen_unary (NOT, GET_MODE (src),
5262					      XEXP (XEXP (src, 0), 0),
5263					      GET_MODE (src)),
5264			  false_rtx);
5265
5266      SUBST (SET_SRC (x),
5267	     gen_binary (IOR, GET_MODE (src),
5268			 gen_binary (IOR, GET_MODE (src), term1, term2),
5269			 term3));
5270
5271      src = SET_SRC (x);
5272    }
5273
5274  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5275     whole thing fail.  */
5276  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5277    return src;
5278  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5279    return dest;
5280  else
5281    /* Convert this into a field assignment operation, if possible.  */
5282    return make_field_assignment (x);
5283}
5284
5285/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5286   result.  LAST is nonzero if this is the last retry.  */
5287
5288static rtx
5289simplify_logical (rtx x, int last)
5290{
5291  enum machine_mode mode = GET_MODE (x);
5292  rtx op0 = XEXP (x, 0);
5293  rtx op1 = XEXP (x, 1);
5294  rtx reversed;
5295
5296  switch (GET_CODE (x))
5297    {
5298    case AND:
5299      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5300	 insn (and may simplify more).  */
5301      if (GET_CODE (op0) == XOR
5302	  && rtx_equal_p (XEXP (op0, 0), op1)
5303	  && ! side_effects_p (op1))
5304	x = gen_binary (AND, mode,
5305			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5306			op1);
5307
5308      if (GET_CODE (op0) == XOR
5309	  && rtx_equal_p (XEXP (op0, 1), op1)
5310	  && ! side_effects_p (op1))
5311	x = gen_binary (AND, mode,
5312			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5313			op1);
5314
5315      /* Similarly for (~(A ^ B)) & A.  */
5316      if (GET_CODE (op0) == NOT
5317	  && GET_CODE (XEXP (op0, 0)) == XOR
5318	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5319	  && ! side_effects_p (op1))
5320	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5321
5322      if (GET_CODE (op0) == NOT
5323	  && GET_CODE (XEXP (op0, 0)) == XOR
5324	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5325	  && ! side_effects_p (op1))
5326	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5327
5328      /* We can call simplify_and_const_int only if we don't lose
5329	 any (sign) bits when converting INTVAL (op1) to
5330	 "unsigned HOST_WIDE_INT".  */
5331      if (GET_CODE (op1) == CONST_INT
5332	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5333	      || INTVAL (op1) > 0))
5334	{
5335	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5336
5337	  /* If we have (ior (and X C1) C2) and the next restart would be
5338	     the last, simplify this by making C1 as small as possible
5339	     and then exit.  */
5340	  if (last
5341	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5342	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
5343	      && GET_CODE (op1) == CONST_INT)
5344	    return gen_binary (IOR, mode,
5345			       gen_binary (AND, mode, XEXP (op0, 0),
5346					   GEN_INT (INTVAL (XEXP (op0, 1))
5347						    & ~INTVAL (op1))), op1);
5348
5349	  if (GET_CODE (x) != AND)
5350	    return x;
5351
5352	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5353	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
5354	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5355	}
5356
5357      /* Convert (A | B) & A to A.  */
5358      if (GET_CODE (op0) == IOR
5359	  && (rtx_equal_p (XEXP (op0, 0), op1)
5360	      || rtx_equal_p (XEXP (op0, 1), op1))
5361	  && ! side_effects_p (XEXP (op0, 0))
5362	  && ! side_effects_p (XEXP (op0, 1)))
5363	return op1;
5364
5365      /* In the following group of tests (and those in case IOR below),
5366	 we start with some combination of logical operations and apply
5367	 the distributive law followed by the inverse distributive law.
5368	 Most of the time, this results in no change.  However, if some of
5369	 the operands are the same or inverses of each other, simplifications
5370	 will result.
5371
5372	 For example, (and (ior A B) (not B)) can occur as the result of
5373	 expanding a bit field assignment.  When we apply the distributive
5374	 law to this, we get (ior (and A (not B)) (and B (not B))),
5375	 which then simplifies to (and A (not B)).
5376
5377	 If we have (and (ior A B) C), apply the distributive law and then
5378	 the inverse distributive law to see if things simplify.  */
5379
5380      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5381	{
5382	  x = apply_distributive_law
5383	    (gen_binary (GET_CODE (op0), mode,
5384			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5385			 gen_binary (AND, mode, XEXP (op0, 1),
5386				     copy_rtx (op1))));
5387	  if (GET_CODE (x) != AND)
5388	    return x;
5389	}
5390
5391      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5392	return apply_distributive_law
5393	  (gen_binary (GET_CODE (op1), mode,
5394		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5395		       gen_binary (AND, mode, XEXP (op1, 1),
5396				   copy_rtx (op0))));
5397
5398      /* Similarly, taking advantage of the fact that
5399	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
5400
5401      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5402	return apply_distributive_law
5403	  (gen_binary (XOR, mode,
5404		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5405		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5406				   XEXP (op1, 1))));
5407
5408      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5409	return apply_distributive_law
5410	  (gen_binary (XOR, mode,
5411		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5412		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5413      break;
5414
5415    case IOR:
5416      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5417      if (GET_CODE (op1) == CONST_INT
5418	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5419	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5420	return op1;
5421
5422      /* Convert (A & B) | A to A.  */
5423      if (GET_CODE (op0) == AND
5424	  && (rtx_equal_p (XEXP (op0, 0), op1)
5425	      || rtx_equal_p (XEXP (op0, 1), op1))
5426	  && ! side_effects_p (XEXP (op0, 0))
5427	  && ! side_effects_p (XEXP (op0, 1)))
5428	return op1;
5429
5430      /* If we have (ior (and A B) C), apply the distributive law and then
5431	 the inverse distributive law to see if things simplify.  */
5432
5433      if (GET_CODE (op0) == AND)
5434	{
5435	  x = apply_distributive_law
5436	    (gen_binary (AND, mode,
5437			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5438			 gen_binary (IOR, mode, XEXP (op0, 1),
5439				     copy_rtx (op1))));
5440
5441	  if (GET_CODE (x) != IOR)
5442	    return x;
5443	}
5444
5445      if (GET_CODE (op1) == AND)
5446	{
5447	  x = apply_distributive_law
5448	    (gen_binary (AND, mode,
5449			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5450			 gen_binary (IOR, mode, XEXP (op1, 1),
5451				     copy_rtx (op0))));
5452
5453	  if (GET_CODE (x) != IOR)
5454	    return x;
5455	}
5456
5457      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5458	 mode size to (rotate A CX).  */
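      /* E.g., in SImode, (ior (ashift A (const_int 3))
	 (lshiftrt A (const_int 29))) becomes (rotate A (const_int 3)).  */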
5459
5460      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5461	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5462	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5463	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5464	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5465	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5466	      == GET_MODE_BITSIZE (mode)))
5467	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5468			       (GET_CODE (op0) == ASHIFT
5469				? XEXP (op0, 1) : XEXP (op1, 1)));
5470
5471      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5472	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5473	 does not affect any of the bits in OP1, it can really be done
5474	 as a PLUS and we can associate.  We do this by seeing if OP1
5475	 can be safely shifted left C bits.  */
5476      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5477	  && GET_CODE (XEXP (op0, 0)) == PLUS
5478	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5479	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5480	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5481	{
5482	  int count = INTVAL (XEXP (op0, 1));
5483	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5484
5485	  if (mask >> count == INTVAL (op1)
5486	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5487	    {
5488	      SUBST (XEXP (XEXP (op0, 0), 1),
5489		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5490	      return op0;
5491	    }
5492	}
5493      break;
5494
5495    case XOR:
5496      /* If we are XORing two things that have no bits in common,
5497	 convert them into an IOR.  This helps to detect rotation encoded
5498	 using those methods and possibly other simplifications.  */
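      /* E.g., (xor (and X 15) (ashift Y 4)) has no nonzero bits in
	 common, since the AND keeps only the low nibble and the shift
	 clears it, so the XOR becomes an equivalent IOR.  */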
5499
5500      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5501	  && (nonzero_bits (op0, mode)
5502	      & nonzero_bits (op1, mode)) == 0)
5503	return (gen_binary (IOR, mode, op0, op1));
5504
5505      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5506	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5507	 (NOT y).  */
5508      {
5509	int num_negated = 0;
5510
5511	if (GET_CODE (op0) == NOT)
5512	  num_negated++, op0 = XEXP (op0, 0);
5513	if (GET_CODE (op1) == NOT)
5514	  num_negated++, op1 = XEXP (op1, 0);
5515
5516	if (num_negated == 2)
5517	  {
5518	    SUBST (XEXP (x, 0), op0);
5519	    SUBST (XEXP (x, 1), op1);
5520	  }
5521	else if (num_negated == 1)
5522	  return
5523	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5524				mode);
5525      }
5526
5527      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5528	 correspond to a machine insn or result in further simplifications
5529	 if B is a constant.  */
5530
5531      if (GET_CODE (op0) == AND
5532	  && rtx_equal_p (XEXP (op0, 1), op1)
5533	  && ! side_effects_p (op1))
5534	return gen_binary (AND, mode,
5535			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5536			   op1);
5537
5538      else if (GET_CODE (op0) == AND
5539	       && rtx_equal_p (XEXP (op0, 0), op1)
5540	       && ! side_effects_p (op1))
5541	return gen_binary (AND, mode,
5542			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5543			   op1);
5544
5545      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5546	 comparison if STORE_FLAG_VALUE is 1.  */
5547      if (STORE_FLAG_VALUE == 1
5548	  && op1 == const1_rtx
5549	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5550	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5551					      XEXP (op0, 1))))
5552	return reversed;
5553
5554      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5555	 is (lt foo (const_int 0)), so we can perform the above
5556	 simplification if STORE_FLAG_VALUE is 1.  */
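      /* E.g., in SImode, (xor (lshiftrt FOO (const_int 31)) (const_int 1))
	 becomes (ge FOO (const_int 0)): the shift isolates the sign bit,
	 which is (lt FOO 0), and the XOR with 1 reverses the test.  */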
5557
5558      if (STORE_FLAG_VALUE == 1
5559	  && op1 == const1_rtx
5560	  && GET_CODE (op0) == LSHIFTRT
5561	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5562	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5563	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5564
5565      /* (xor (comparison foo bar) (const_int sign-bit))
5566	 when STORE_FLAG_VALUE is the sign bit.  */
5567      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5568	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5569	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5570	  && op1 == const_true_rtx
5571	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5572	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5573					      XEXP (op0, 1))))
5574	return reversed;
5575
5576      break;
5577
5578    default:
5579      abort ();
5580    }
5581
5582  return x;
5583}
5584
5585/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5586   operations" because they can be replaced with two more basic operations.
5587   ZERO_EXTEND is also considered "compound" because it can be replaced with
5588   an AND operation, which is simpler, though only one operation.
5589
5590   The function expand_compound_operation is called with an rtx expression
5591   and will convert it to the appropriate shifts and AND operations,
5592   simplifying at each stage.
5593
5594   The function make_compound_operation is called to convert an expression
5595   consisting of shifts and ANDs into the equivalent compound expression.
5596   It is the inverse of this function, loosely speaking.  */
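
/* For example, (zero_extend:SI (reg:QI R)) expands to, roughly,
   (and:SI (subreg:SI (reg:QI R) 0) (const_int 255)).  */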
5597
5598static rtx
5599expand_compound_operation (rtx x)
5600{
5601  unsigned HOST_WIDE_INT pos = 0, len;
5602  int unsignedp = 0;
5603  unsigned int modewidth;
5604  rtx tem;
5605
5606  switch (GET_CODE (x))
5607    {
5608    case ZERO_EXTEND:
5609      unsignedp = 1;
5610    case SIGN_EXTEND:
5611      /* We can't necessarily use a const_int for a multiword mode;
5612	 it depends on implicitly extending the value.
5613	 Since we don't know the right way to extend it,
5614	 we can't tell whether the implicit way is right.
5615
5616	 Even for a mode that is no wider than a const_int,
5617	 we can't win, because we need to sign extend one of its bits through
5618	 the rest of it, and we don't know which bit.  */
5619      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5620	return x;
5621
5622      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5623	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5624	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5625	 reloaded.  If not for that, MEMs would very rarely be safe.
5626
5627	 Reject MODEs bigger than a word, because we might not be able
5628	 to reference a two-register group starting with an arbitrary register
5629	 (and currently gen_lowpart might crash for a SUBREG).  */
5630
5631      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5632	return x;
5633
5634      /* Reject MODEs that aren't scalar integers because turning vector
5635	 or complex modes into shifts causes problems.  */
5636
5637      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5638	return x;
5639
5640      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5641      /* If the inner object has VOIDmode (the only way this can happen
5642	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5643	 know how much masking to do.  */
5644      if (len == 0)
5645	return x;
5646
5647      break;
5648
5649    case ZERO_EXTRACT:
5650      unsignedp = 1;
5651    case SIGN_EXTRACT:
5652      /* If the operand is a CLOBBER, just return it.  */
5653      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5654	return XEXP (x, 0);
5655
5656      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5657	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5658	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5659	return x;
5660
5661      /* Reject MODEs that aren't scalar integers because turning vector
5662	 or complex modes into shifts causes problems.  */
5663
5664      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5665	return x;
5666
5667      len = INTVAL (XEXP (x, 1));
5668      pos = INTVAL (XEXP (x, 2));
5669
5670      /* If this goes outside the object being extracted, replace the object
5671	 with a (use (mem ...)) construct that only combine understands
5672	 and is used only for this purpose.  */
5673      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5674	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5675
5676      if (BITS_BIG_ENDIAN)
5677	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5678
5679      break;
5680
5681    default:
5682      return x;
5683    }
5684  /* Convert sign extension to zero extension, if we know that the high
5685     bit is not set, as this is easier to optimize.  It will be converted
5686     back to a cheaper alternative in make_extraction.  */
5687  if (GET_CODE (x) == SIGN_EXTEND
5688      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5689	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5690		& ~(((unsigned HOST_WIDE_INT)
5691		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5692		     >> 1))
5693	       == 0)))
5694    {
5695      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5696      rtx temp2 = expand_compound_operation (temp);
5697
5698      /* Make sure this is a profitable operation.  */
5699      if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5700	return temp2;
5701      else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5702	return temp;
5703      else
5704	return x;
5705    }
5706
5707  /* We can optimize some special cases of ZERO_EXTEND.  */
5708  if (GET_CODE (x) == ZERO_EXTEND)
5709    {
5710      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5711         know that the last value didn't have any inappropriate bits
5712         set.  */
5713      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5714	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5715	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5716	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5717	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5718	return XEXP (XEXP (x, 0), 0);
5719
5720      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5721      if (GET_CODE (XEXP (x, 0)) == SUBREG
5722	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5723	  && subreg_lowpart_p (XEXP (x, 0))
5724	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5725	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5726	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5727	return SUBREG_REG (XEXP (x, 0));
5728
5729      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5730         is a comparison and STORE_FLAG_VALUE permits.  This is like
5731         the first case, but it works even when GET_MODE (x) is larger
5732         than HOST_WIDE_INT.  */
5733      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5734	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5735	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5736	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5737	      <= HOST_BITS_PER_WIDE_INT)
5738	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5739	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5740	return XEXP (XEXP (x, 0), 0);
5741
5742      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5743      if (GET_CODE (XEXP (x, 0)) == SUBREG
5744	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5745	  && subreg_lowpart_p (XEXP (x, 0))
5746	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5747	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5748	      <= HOST_BITS_PER_WIDE_INT)
5749	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5750	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5751	return SUBREG_REG (XEXP (x, 0));
5752
5753    }
5754
5755  /* If we reach here, we want to return a pair of shifts.  The inner
5756     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5757     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5758     logical depending on the value of UNSIGNEDP.
5759
5760     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5761     converted into an AND of a shift.
5762
5763     We must check for the case where the left shift would have a negative
5764     count.  This can happen in a case like (x >> 31) & 255 on machines
5765     that can't shift by a constant.  On those machines, we would first
5766     combine the shift with the AND to produce a variable-position
5767     extraction.  Then the constant of 31 would be substituted in to produce
5768     such a position.  */
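  /* For instance, (sign_extract:SI X 8 8) produces
     (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)):
     the inner shift moves the field's top bit into the sign bit and
     the outer arithmetic shift extends it back down.  */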
5769
5770  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5771  if (modewidth >= pos + len)
5772    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5773				GET_MODE (x),
5774				simplify_shift_const (NULL_RTX, ASHIFT,
5775						      GET_MODE (x),
5776						      XEXP (x, 0),
5777						      modewidth - pos - len),
5778				modewidth - len);
5779
5780  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5781    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5782				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5783							GET_MODE (x),
5784							XEXP (x, 0), pos),
5785				  ((HOST_WIDE_INT) 1 << len) - 1);
5786  else
5787    /* Any other cases we can't handle.  */
5788    return x;
5789
5790  /* If we couldn't do this for some reason, return the original
5791     expression.  */
5792  if (GET_CODE (tem) == CLOBBER)
5793    return x;
5794
5795  return tem;
5796}
5797
5798/* X is a SET which contains an assignment of one object into
5799   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5800   or certain SUBREGS). If possible, convert it into a series of
5801   logical operations.
5802
5803   We half-heartedly support variable positions, but do not at all
5804   support variable lengths.  */
5805
5806static rtx
5807expand_field_assignment (rtx x)
5808{
5809  rtx inner;
5810  rtx pos;			/* Always counts from low bit.  */
5811  int len;
5812  rtx mask;
5813  enum machine_mode compute_mode;
5814
5815  /* Loop until we find something we can't simplify.  */
5816  while (1)
5817    {
5818      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5819	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5820	{
5821	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5822	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5823	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5824	}
5825      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5826	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5827	{
5828	  inner = XEXP (SET_DEST (x), 0);
5829	  len = INTVAL (XEXP (SET_DEST (x), 1));
5830	  pos = XEXP (SET_DEST (x), 2);
5831
5832	  /* If the position is constant and spans the width of INNER,
5833	     surround INNER with a USE to indicate this.  */
5834	  if (GET_CODE (pos) == CONST_INT
5835	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5836	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5837
5838	  if (BITS_BIG_ENDIAN)
5839	    {
5840	      if (GET_CODE (pos) == CONST_INT)
5841		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5842			       - INTVAL (pos));
5843	      else if (GET_CODE (pos) == MINUS
5844		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5845		       && (INTVAL (XEXP (pos, 1))
5846			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5847		/* If position is ADJUST - X, new position is X.  */
5848		pos = XEXP (pos, 0);
5849	      else
5850		pos = gen_binary (MINUS, GET_MODE (pos),
5851				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5852					   - len),
5853				  pos);
5854	    }
5855	}
5856
5857      /* A SUBREG between two modes that occupy the same number of words
5858	 can be done by moving the SUBREG to the source.  */
5859      else if (GET_CODE (SET_DEST (x)) == SUBREG
5860	       /* We need SUBREGs to compute nonzero_bits properly.  */
5861	       && nonzero_sign_valid
5862	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5863		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5864		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5865			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5866	{
5867	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5868			   gen_lowpart_for_combine
5869			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
5870			    SET_SRC (x)));
5871	  continue;
5872	}
5873      else
5874	break;
5875
5876      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5877	inner = SUBREG_REG (inner);
5878
5879      compute_mode = GET_MODE (inner);
5880
5881      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
5882      if (! SCALAR_INT_MODE_P (compute_mode))
5883	{
5884	  enum machine_mode imode;
5885
5886	  /* Don't do anything for vector or complex integral types.  */
5887	  if (! FLOAT_MODE_P (compute_mode))
5888	    break;
5889
5890	  /* Try to find an integral mode to pun with.  */
5891	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5892	  if (imode == BLKmode)
5893	    break;
5894
5895	  compute_mode = imode;
5896	  inner = gen_lowpart_for_combine (imode, inner);
5897	}
5898
5899      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5900      if (len < HOST_BITS_PER_WIDE_INT)
5901	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5902      else
5903	break;
5904
5905      /* Now compute the equivalent expression.  Make a copy of INNER
5906	 for the SET_DEST in case it is a MEM into which we will substitute;
5907	 we don't want shared RTL in that case.  */
5908      x = gen_rtx_SET
5909	(VOIDmode, copy_rtx (inner),
5910	 gen_binary (IOR, compute_mode,
5911		     gen_binary (AND, compute_mode,
5912				 simplify_gen_unary (NOT, compute_mode,
5913						     gen_binary (ASHIFT,
5914								 compute_mode,
5915								 mask, pos),
5916						     compute_mode),
5917				 inner),
5918		     gen_binary (ASHIFT, compute_mode,
5919				 gen_binary (AND, compute_mode,
5920					     gen_lowpart_for_combine
5921					     (compute_mode, SET_SRC (x)),
5922					     mask),
5923				 pos)));
5924    }
5925
5926  return x;
5927}
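
/* An illustrative sketch, not part of the compiler: the rewrite built
   above, written as plain C on a 32-bit value (assumes 0 < LEN < 32).
   Storing the low LEN bits of SRC into WORD at bit POS, counting from
   the lsb, is exactly the (ior (and ...) (ashift (and ...) ...)) form
   that expand_field_assignment produces.  */

static unsigned int
field_store_sketch (unsigned int word, unsigned int src, int pos, int len)
{
  unsigned int mask = (1u << len) - 1;	/* LEN low-order one bits.  */

  /* Clear the field, then drop the new value into it:
     e.g. field_store_sketch (0xffffffff, 0x5, 8, 4) == 0xfffff5ff.  */
  return (word & ~(mask << pos)) | ((src & mask) << pos);
}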
5928
5929/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5930   it is an RTX that represents a variable starting position; otherwise,
5931   POS is the (constant) starting bit position (counted from the LSB).
5932
5933   INNER may be a USE.  This will occur when we started with a bitfield
5934   that went outside the boundary of the object in memory, which is
5935   allowed on most machines.  To isolate this case, we produce a USE
5936   whose mode is wide enough and surround the MEM with it.  The only
5937   code that understands the USE is this routine.  If it is not removed,
5938   it will cause the resulting insn not to match.
5939
5940   UNSIGNEDP is nonzero for an unsigned reference and zero for a
5941   signed reference.
5942
5943   IN_DEST is nonzero if this is a reference in the destination of a
5944   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
5945   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5946   be used.
5947
5948   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
5949   ZERO_EXTRACT should be built even for bits starting at bit 0.
5950
5951   MODE is the desired mode of the result (if IN_DEST == 0).
5952
5953   The result is an RTX for the extraction or NULL_RTX if the target
5954   can't handle it.  */
5955
5956static rtx
5957make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
5958		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
5959		 int in_dest, int in_compare)
5960{
5961  /* This mode describes the size of the storage area
5962     to fetch the overall value from.  Within that, we
5963     ignore the POS lowest bits, etc.  */
5964  enum machine_mode is_mode = GET_MODE (inner);
5965  enum machine_mode inner_mode;
5966  enum machine_mode wanted_inner_mode = byte_mode;
5967  enum machine_mode wanted_inner_reg_mode = word_mode;
5968  enum machine_mode pos_mode = word_mode;
5969  enum machine_mode extraction_mode = word_mode;
5970  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5971  int spans_byte = 0;
5972  rtx new = 0;
5973  rtx orig_pos_rtx = pos_rtx;
5974  HOST_WIDE_INT orig_pos;
5975
5976  /* Get some information about INNER and get the innermost object.  */
5977  if (GET_CODE (inner) == USE)
5978    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5979    /* We don't need to adjust the position because we set up the USE
5980       to pretend that it was a full-word object.  */
5981    spans_byte = 1, inner = XEXP (inner, 0);
5982  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5983    {
5984      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5985	 consider just the QI as the memory to extract from.
5986	 The subreg adds or removes high bits; its mode is
5987	 irrelevant to the meaning of this extraction,
5988	 since POS and LEN count from the lsb.  */
5989      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5990	is_mode = GET_MODE (SUBREG_REG (inner));
5991      inner = SUBREG_REG (inner);
5992    }
5993  else if (GET_CODE (inner) == ASHIFT
5994	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
5995	   && pos_rtx == 0 && pos == 0
5996	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
5997    {
5998      /* We're extracting the least significant bits of an rtx
5999	 (ashift X (const_int C)), where LEN > C.  Extract the
6000	 least significant (LEN - C) bits of X, giving an rtx
6001	 whose mode is MODE, then shift it left C times.  */
6002      new = make_extraction (mode, XEXP (inner, 0),
6003			     0, 0, len - INTVAL (XEXP (inner, 1)),
6004			     unsignedp, in_dest, in_compare);
6005      if (new != 0)
6006	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6007    }
6008
6009  inner_mode = GET_MODE (inner);
6010
6011  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6012    pos = INTVAL (pos_rtx), pos_rtx = 0;
6013
6014  /* See if this can be done without an extraction.  We never can if the
6015     width of the field is not the same as that of some integer mode. For
6016     registers, we can only avoid the extraction if the position is at the
6017     low-order bit and this is either not in the destination or we have the
6018     appropriate STRICT_LOW_PART operation available.
6019
6020     For MEM, we can avoid an extract if the field starts on an appropriate
6021     boundary and we can change the mode of the memory reference.  However,
6022     we cannot directly access the MEM if we have a USE and the underlying
6023     MEM is not TMODE.  This combination means that MEM was being used in a
6024     context where bits outside its mode were being referenced; that is only
6025     valid in bit-field insns.  */
6026
6027  if (tmode != BLKmode
6028      && ! (spans_byte && inner_mode != tmode)
6029      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6030	   && GET_CODE (inner) != MEM
6031	   && (! in_dest
6032	       || (GET_CODE (inner) == REG
6033		   && have_insn_for (STRICT_LOW_PART, tmode))))
6034	  || (GET_CODE (inner) == MEM && pos_rtx == 0
6035	      && (pos
6036		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6037		     : BITS_PER_UNIT)) == 0
6038	      /* We can't do this if we are widening INNER_MODE (it
6039		 may not be aligned, for one thing).  */
6040	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6041	      && (inner_mode == tmode
6042		  || (! mode_dependent_address_p (XEXP (inner, 0))
6043		      && ! MEM_VOLATILE_P (inner))))))
6044    {
6045      /* If INNER is a MEM, make a new MEM that encompasses just the desired
6046	 field.  If the original and current mode are the same, we need not
6047	 adjust the offset.  Otherwise, we do if bytes are big-endian.
6048
6049	 If INNER is not a MEM, get a piece consisting of just the field
6050	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6051
6052      if (GET_CODE (inner) == MEM)
6053	{
6054	  HOST_WIDE_INT offset;
6055
6056	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6057	  if (BYTES_BIG_ENDIAN)
6058	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6059	  else
6060	    offset = pos / BITS_PER_UNIT;
6061
6062	  new = adjust_address_nv (inner, tmode, offset);
6063	}
6064      else if (GET_CODE (inner) == REG)
6065	{
6066	  if (tmode != inner_mode)
6067	    {
6068	      /* We can't call gen_lowpart_for_combine in a DEST since we
6069		 always want a SUBREG (see below) and it would sometimes
6070		 return a new hard register.  */
6071	      if (pos || in_dest)
6072		{
6073		  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6074
6075		  if (WORDS_BIG_ENDIAN
6076		      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6077		    final_word = ((GET_MODE_SIZE (inner_mode)
6078				   - GET_MODE_SIZE (tmode))
6079				  / UNITS_PER_WORD) - final_word;
6080
6081		  final_word *= UNITS_PER_WORD;
6082		  if (BYTES_BIG_ENDIAN
6083		      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6084		    final_word += (GET_MODE_SIZE (inner_mode)
6085				   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6086
6087		  /* Avoid creating invalid subregs, for example when
6088		     simplifying (x>>32)&255.  */
6089		  if (final_word >= GET_MODE_SIZE (inner_mode))
6090		    return NULL_RTX;
6091
6092		  new = gen_rtx_SUBREG (tmode, inner, final_word);
6093		}
6094	      else
6095		new = gen_lowpart_for_combine (tmode, inner);
6096	    }
6097	  else
6098	    new = inner;
6099	}
6100      else
6101	new = force_to_mode (inner, tmode,
6102			     len >= HOST_BITS_PER_WIDE_INT
6103			     ? ~(unsigned HOST_WIDE_INT) 0
6104			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6105			     NULL_RTX, 0);
6106
6107      /* If this extraction is going into the destination of a SET,
6108	 make a STRICT_LOW_PART unless we made a MEM.  */
6109
6110      if (in_dest)
6111	return (GET_CODE (new) == MEM ? new
6112		: (GET_CODE (new) != SUBREG
6113		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6114		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6115
6116      if (mode == tmode)
6117	return new;
6118
6119      if (GET_CODE (new) == CONST_INT)
6120	return gen_int_mode (INTVAL (new), mode);
6121
6122      /* If we know that no extraneous bits are set, and that the high
6123	 bit is not set, convert the extraction to the cheaper of
6124	 sign and zero extension, which are equivalent in these cases.  */
6125      if (flag_expensive_optimizations
6126	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6127	      && ((nonzero_bits (new, tmode)
6128		   & ~(((unsigned HOST_WIDE_INT)
6129			GET_MODE_MASK (tmode))
6130		       >> 1))
6131		  == 0)))
6132	{
6133	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6134	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6135
6136	  /* Prefer ZERO_EXTENSION, since it gives more information to
6137	     backends.  */
6138	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6139	    return temp;
6140	  return temp1;
6141	}
6142
6143      /* Otherwise, sign- or zero-extend unless we already are in the
6144	 proper mode.  */
6145
6146      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6147			     mode, new));
6148    }
6149
6150  /* Unless this is a COMPARE or we have a funny memory reference,
6151     don't do anything with zero-extending field extracts starting at
6152     the low-order bit since they are simple AND operations.  */
6153  if (pos_rtx == 0 && pos == 0 && ! in_dest
6154      && ! in_compare && ! spans_byte && unsignedp)
6155    return 0;
6156
6157  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6158     we would be spanning bytes or if the position is not a constant and the
6159     length is not 1.  In all other cases, we would only be going outside
6160     our object in cases when an original shift would have been
6161     undefined.  */
6162  if (! spans_byte && GET_CODE (inner) == MEM
6163      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6164	  || (pos_rtx != 0 && len != 1)))
6165    return 0;
6166
6167  /* Get the mode to use should INNER not be a MEM, the mode for the position,
6168     and the mode for the result.  */
6169  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6170    {
6171      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6172      pos_mode = mode_for_extraction (EP_insv, 2);
6173      extraction_mode = mode_for_extraction (EP_insv, 3);
6174    }
6175
6176  if (! in_dest && unsignedp
6177      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6178    {
6179      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6180      pos_mode = mode_for_extraction (EP_extzv, 3);
6181      extraction_mode = mode_for_extraction (EP_extzv, 0);
6182    }
6183
6184  if (! in_dest && ! unsignedp
6185      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6186    {
6187      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6188      pos_mode = mode_for_extraction (EP_extv, 3);
6189      extraction_mode = mode_for_extraction (EP_extv, 0);
6190    }
6191
6192  /* Never narrow an object, since that might not be safe.  */
6193
6194  if (mode != VOIDmode
6195      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6196    extraction_mode = mode;
6197
6198  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6199      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6200    pos_mode = GET_MODE (pos_rtx);
6201
6202  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6203     if we have to change the mode of memory and cannot, the desired mode is
6204     EXTRACTION_MODE.  */
6205  if (GET_CODE (inner) != MEM)
6206    wanted_inner_mode = wanted_inner_reg_mode;
6207  else if (inner_mode != wanted_inner_mode
6208	   && (mode_dependent_address_p (XEXP (inner, 0))
6209	       || MEM_VOLATILE_P (inner)))
6210    wanted_inner_mode = extraction_mode;
6211
6212  orig_pos = pos;
6213
6214  if (BITS_BIG_ENDIAN)
6215    {
6216      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6217	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6218	 position.  Otherwise, build subtraction.
6219	 Note that POS is relative to the mode of the original argument.
6220	 If it's a MEM we need to recompute POS relative to that.
6221	 However, if we're extracting from (or inserting into) a register,
6222	 we want to recompute POS relative to wanted_inner_mode.  */
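      /* For example, with a 32-bit WIDTH, an 8-bit field at lsb-relative
	 position 4 sits at big-endian position 32 - 8 - 4 == 20.  */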
6223      int width = (GET_CODE (inner) == MEM
6224		   ? GET_MODE_BITSIZE (is_mode)
6225		   : GET_MODE_BITSIZE (wanted_inner_mode));
6226
6227      if (pos_rtx == 0)
6228	pos = width - len - pos;
6229      else
6230	pos_rtx
6231	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6232      /* POS may be less than 0 now, but we check for that below.
6233	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
6234    }
6235
6236  /* If INNER has a wider mode, make it smaller.  If this is a constant
6237     extract, try to adjust the byte to point to the byte containing
6238     the value.  */
6239  if (wanted_inner_mode != VOIDmode
6240      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6241      && ((GET_CODE (inner) == MEM
6242	   && (inner_mode == wanted_inner_mode
6243	       || (! mode_dependent_address_p (XEXP (inner, 0))
6244		   && ! MEM_VOLATILE_P (inner))))))
6245    {
6246      int offset = 0;
6247
6248      /* The computations below will be correct if the machine is big
6249	 endian in both bits and bytes or little endian in bits and bytes.
6250	 If it is mixed, we must adjust.  */
6251
6252      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6253	 adjust OFFSET to compensate.  */
6254      if (BYTES_BIG_ENDIAN
6255	  && ! spans_byte
6256	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6257	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6258
6259      /* If this is a constant position, we can move to the desired byte.  */
6260      if (pos_rtx == 0)
6261	{
6262	  offset += pos / BITS_PER_UNIT;
6263	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6264	}
6265
6266      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6267	  && ! spans_byte
6268	  && is_mode != wanted_inner_mode)
6269	offset = (GET_MODE_SIZE (is_mode)
6270		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6271
6272      if (offset != 0 || inner_mode != wanted_inner_mode)
6273	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6274    }
6275
6276  /* If INNER is not memory, we can always get it into the proper mode.  If we
6277     are changing its mode, POS must be a constant and smaller than the size
6278     of the new mode.  */
6279  else if (GET_CODE (inner) != MEM)
6280    {
6281      if (GET_MODE (inner) != wanted_inner_mode
6282	  && (pos_rtx != 0
6283	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6284	return 0;
6285
6286      inner = force_to_mode (inner, wanted_inner_mode,
6287			     pos_rtx
6288			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6289			     ? ~(unsigned HOST_WIDE_INT) 0
6290			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6291				<< orig_pos),
6292			     NULL_RTX, 0);
6293    }
6294
6295  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6296     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6297  if (pos_rtx != 0
6298      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6299    {
6300      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6301
6302      /* If we know that no extraneous bits are set, and that the high
6303	 bit is not set, convert the extraction to the cheaper of
6304	 sign and zero extension, which are equivalent in these
6305	 cases.  */
6306      if (flag_expensive_optimizations
6307	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6308	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6309		   & ~(((unsigned HOST_WIDE_INT)
6310			GET_MODE_MASK (GET_MODE (pos_rtx)))
6311		       >> 1))
6312		  == 0)))
6313	{
6314	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6315
6316	  /* Prefer ZERO_EXTENSION, since it gives more information to
6317	     backends.  */
6318	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6319	    temp = temp1;
6320	}
6321      pos_rtx = temp;
6322    }
6323  else if (pos_rtx != 0
6324	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6325    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6326
6327  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6328     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6329     be a CONST_INT.  */
6330  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6331    pos_rtx = orig_pos_rtx;
6332
6333  else if (pos_rtx == 0)
6334    pos_rtx = GEN_INT (pos);
6335
6336  /* Make the required operation.  See if we can use an existing rtx.  */
6337  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6338			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6339  if (! in_dest)
6340    new = gen_lowpart_for_combine (mode, new);
6341
6342  return new;
6343}
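
/* An illustrative sketch, not part of the compiler: what a LEN-bit
   ZERO_EXTRACT and SIGN_EXTRACT at bit POS compute, as shift/mask
   arithmetic on a 32-bit value (assumes 0 < LEN < 32, POS + LEN <= 32,
   and an arithmetic right shift for signed ints).  */

static unsigned int
zero_extract_sketch (unsigned int word, int pos, int len)
{
  /* Shift the field to the bottom and mask off everything above it.  */
  return (word >> pos) & ((1u << len) - 1);
}

static int
sign_extract_sketch (unsigned int word, int pos, int len)
{
  /* Shift the field to the top, then arithmetic-shift back down so the
     field's own top bit is replicated as the sign.  */
  return (int) (word << (32 - pos - len)) >> (32 - len);
}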
6344
6345/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6346   with any other operations in X.  Return X without that shift if so.  */
6347
6348static rtx
6349extract_left_shift (rtx x, int count)
6350{
6351  enum rtx_code code = GET_CODE (x);
6352  enum machine_mode mode = GET_MODE (x);
6353  rtx tem;
6354
6355  switch (code)
6356    {
6357    case ASHIFT:
6358      /* This is the shift itself.  If it is wide enough, we will return
6359	 either the value being shifted if the shift count is equal to
6360	 COUNT or a shift for the difference.  */
6361      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6362	  && INTVAL (XEXP (x, 1)) >= count)
6363	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6364				     INTVAL (XEXP (x, 1)) - count);
6365      break;
6366
6367    case NEG:  case NOT:
6368      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6369	return simplify_gen_unary (code, mode, tem, mode);
6370
6371      break;
6372
6373    case PLUS:  case IOR:  case XOR:  case AND:
6374      /* If we can safely shift this constant and we find the inner shift,
6375	 make a new operation.  */
6376      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6377	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6378	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6379	return gen_binary (code, mode, tem,
6380			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6381
6382      break;
6383
6384    default:
6385      break;
6386    }
6387
6388  return 0;
6389}
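
/* An illustrative sketch, not part of the compiler: the identity that
   extract_left_shift relies on for PLUS, IOR, XOR and AND.  When the
   low COUNT bits of the constant K are zero, the shift commutes out:
   ((x << count) + k) == ((x + (k >> count)) << count).  Both functions
   below return the same value whenever (k & ((1u << count) - 1)) == 0.  */

static unsigned int
shift_inside_sketch (unsigned int x, unsigned int k, int count)
{
  return (x << count) + k;		/* (plus (ashift x count) k) */
}

static unsigned int
shift_outside_sketch (unsigned int x, unsigned int k, int count)
{
  return (x + (k >> count)) << count;	/* (ashift (plus x ...) count) */
}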
6390
6391/* Look at the expression rooted at X.  Look for expressions
6392   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6393   Form these expressions.
6394
6395   Return the new rtx, usually just X.
6396
6397   Also, for machines like the VAX that don't have logical shift insns,
6398   try to convert logical to arithmetic shift operations in cases where
6399   they are equivalent.  This undoes the canonicalizations to logical
6400   shifts done elsewhere.
6401
6402   We try, as much as possible, to re-use rtl expressions to save memory.
6403
6404   IN_CODE says what kind of expression we are processing.  Normally, it is
6405   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
6406   being kludges), it is MEM.  When processing the arguments of a comparison
6407   or a COMPARE against zero, it is COMPARE.  */
6408
6409static rtx
6410make_compound_operation (rtx x, enum rtx_code in_code)
6411{
6412  enum rtx_code code = GET_CODE (x);
6413  enum machine_mode mode = GET_MODE (x);
6414  int mode_width = GET_MODE_BITSIZE (mode);
6415  rtx rhs, lhs;
6416  enum rtx_code next_code;
6417  int i;
6418  rtx new = 0;
6419  rtx tem;
6420  const char *fmt;
6421
6422  /* Select the code to be used in recursive calls.  Once we are inside an
6423     address, we stay there.  If we have a comparison, set to COMPARE,
6424     but once inside, go back to our default of SET.  */
6425
6426  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6427	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6428		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6429	       : in_code == COMPARE ? SET : in_code);
6430
6431  /* Process depending on the code of this operation.  If NEW is set
6432     nonzero, it will be returned.  */
6433
6434  switch (code)
6435    {
6436    case ASHIFT:
6437      /* Convert shifts by constants into multiplications if inside
6438	 an address.  */
6439      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6440	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6441	  && INTVAL (XEXP (x, 1)) >= 0)
6442	{
6443	  new = make_compound_operation (XEXP (x, 0), next_code);
6444	  new = gen_rtx_MULT (mode, new,
6445			      GEN_INT ((HOST_WIDE_INT) 1
6446				       << INTVAL (XEXP (x, 1))));
6447	}
6448      break;
6449
6450    case AND:
6451      /* If the second operand is not a constant, we can't do anything
6452	 with it.  */
6453      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6454	break;
6455
6456      /* If the constant is a power of two minus one and the first operand
6457	 is a logical right shift, make an extraction.  */
6458      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6459	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6460	{
6461	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6462	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6463				 0, in_code == COMPARE);
6464	}
6465
6466      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6467      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6468	       && subreg_lowpart_p (XEXP (x, 0))
6469	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6470	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6471	{
6472	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6473					 next_code);
6474	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6475				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6476				 0, in_code == COMPARE);
6477	}
6478      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6479      else if ((GET_CODE (XEXP (x, 0)) == XOR
6480		|| GET_CODE (XEXP (x, 0)) == IOR)
6481	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6482	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6483	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6484	{
6485	  /* Apply the distributive law, and then try to make extractions.  */
6486	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6487				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6488					     XEXP (x, 1)),
6489				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6490					     XEXP (x, 1)));
6491	  new = make_compound_operation (new, in_code);
6492	}
6493
6494      /* If we have (and (rotate X C) M) and C is larger than the number
6495	 of bits in M, this is an extraction.  */
6496
6497      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6498	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6499	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6500	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6501	{
6502	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6503	  new = make_extraction (mode, new,
6504				 (GET_MODE_BITSIZE (mode)
6505				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6506				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6507	}
6508
6509      /* On machines without logical shifts, if the operand of the AND is
6510	 a logical shift and our mask turns off all the propagated sign
6511	 bits, we can replace the logical shift with an arithmetic shift.  */
6512      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6513	       && !have_insn_for (LSHIFTRT, mode)
6514	       && have_insn_for (ASHIFTRT, mode)
6515	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6516	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6517	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6518	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6519	{
6520	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6521
6522	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6523	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6524	    SUBST (XEXP (x, 0),
6525		   gen_rtx_ASHIFTRT (mode,
6526				     make_compound_operation
6527				     (XEXP (XEXP (x, 0), 0), next_code),
6528				     XEXP (XEXP (x, 0), 1)));
6529	}
6530
6531      /* If the constant is one less than a power of two, this might be
6532	 representable by an extraction even if no shift is present.
6533	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6534	 we are in a COMPARE.  */
6535      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6536	new = make_extraction (mode,
6537			       make_compound_operation (XEXP (x, 0),
6538							next_code),
6539			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6540
6541      /* If we are in a comparison and this is an AND with a power of two,
6542	 convert this into the appropriate bit extract.  */
6543      else if (in_code == COMPARE
6544	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6545	new = make_extraction (mode,
6546			       make_compound_operation (XEXP (x, 0),
6547							next_code),
6548			       i, NULL_RTX, 1, 1, 0, 1);
6549
6550      break;
6551
6552    case LSHIFTRT:
6553      /* If the sign bit is known to be zero, replace this with an
6554	 arithmetic shift.  */
6555      if (have_insn_for (ASHIFTRT, mode)
6556	  && ! have_insn_for (LSHIFTRT, mode)
6557	  && mode_width <= HOST_BITS_PER_WIDE_INT
6558	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6559	{
6560	  new = gen_rtx_ASHIFTRT (mode,
6561				  make_compound_operation (XEXP (x, 0),
6562							   next_code),
6563				  XEXP (x, 1));
6564	  break;
6565	}
6566
6567      /* ... fall through ...  */
6568
6569    case ASHIFTRT:
6570      lhs = XEXP (x, 0);
6571      rhs = XEXP (x, 1);
6572
6573      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6574	 this is a SIGN_EXTRACT.  */
6575      if (GET_CODE (rhs) == CONST_INT
6576	  && GET_CODE (lhs) == ASHIFT
6577	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6578	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6579	{
6580	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6581	  new = make_extraction (mode, new,
6582				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6583				 NULL_RTX, mode_width - INTVAL (rhs),
6584				 code == LSHIFTRT, 0, in_code == COMPARE);
6585	  break;
6586	}
6587
6588      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6589	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6590	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6591	 seem worth the effort; the case checked for occurs on Alpha.  */
6592
6593      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6594	  && ! (GET_CODE (lhs) == SUBREG
6595		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6596	  && GET_CODE (rhs) == CONST_INT
6597	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6598	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6599	new = make_extraction (mode, make_compound_operation (new, next_code),
6600			       0, NULL_RTX, mode_width - INTVAL (rhs),
6601			       code == LSHIFTRT, 0, in_code == COMPARE);
6602
6603      break;
6604
6605    case SUBREG:
6606      /* Call ourselves recursively on the inner expression.  If we are
6607	 narrowing the object and it has a different RTL code from
6608	 what it originally did, do this SUBREG as a force_to_mode.  */
6609
6610      tem = make_compound_operation (SUBREG_REG (x), in_code);
6611      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6612	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6613	  && subreg_lowpart_p (x))
6614	{
6615	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6616				     NULL_RTX, 0);
6617
6618	  /* If we have something other than a SUBREG, we might have
6619	     done an expansion, so rerun ourselves.  */
6620	  if (GET_CODE (newer) != SUBREG)
6621	    newer = make_compound_operation (newer, in_code);
6622
6623	  return newer;
6624	}
6625
6626      /* If this is a paradoxical subreg, and the new code is a sign or
6627	 zero extension, omit the subreg and widen the extension.  If it
6628	 is a regular subreg, we can still get rid of the subreg by not
6629	 widening so much, or in fact removing the extension entirely.  */
6630      if ((GET_CODE (tem) == SIGN_EXTEND
6631	   || GET_CODE (tem) == ZERO_EXTEND)
6632	  && subreg_lowpart_p (x))
6633	{
6634	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6635	      || (GET_MODE_SIZE (mode) >
6636		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6637	    {
6638	      if (! SCALAR_INT_MODE_P (mode))
6639		break;
6640	      tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6641	    }
6642	  else
6643	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6644	  return tem;
6645	}
6646      break;
6647
6648    default:
6649      break;
6650    }
6651
6652  if (new)
6653    {
6654      x = gen_lowpart_for_combine (mode, new);
6655      code = GET_CODE (x);
6656    }
6657
6658  /* Now recursively process each operand of this operation.  */
6659  fmt = GET_RTX_FORMAT (code);
6660  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6661    if (fmt[i] == 'e')
6662      {
6663	new = make_compound_operation (XEXP (x, i), next_code);
6664	SUBST (XEXP (x, i), new);
6665      }
6666
6667  return x;
6668}
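
/* An illustrative sketch, not part of the compiler: the most common
   pattern recognized above.  An AND with a power of two minus one,
   applied to a logical right shift, is a bit-field read; assuming a
   32-bit unsigned int, both functions below fetch the 5-bit field at
   bit positions 3..7, i.e. (zero_extract x 5 3).  */

static unsigned int
and_of_shift_sketch (unsigned int x)
{
  return (x >> 3) & 0x1f;		/* (and (lshiftrt x 3) 31) */
}

static unsigned int
as_extraction_sketch (unsigned int x)
{
  return (x << 24) >> 27;		/* field isolated by shifts alone */
}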
6669
6670/* Given M see if it is a value that would select a field of bits
6671   within an item, but not the entire word.  Return -1 if not.
6672   Otherwise, return the starting position of the field, where 0 is the
6673   low-order bit.
6674
6675   *PLEN is set to the length of the field.  */
6676
6677static int
6678get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6679{
6680  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6681  int pos = exact_log2 (m & -m);
6682  int len;
6683
6684  if (pos < 0)
6685    return -1;
6686
6687  /* Now shift off the low-order zero bits and see if we have a power of
6688     two minus 1.  */
6689  len = exact_log2 ((m >> pos) + 1);
6690
6691  if (len <= 0)
6692    return -1;
6693
6694  *plen = len;
6695  return pos;
6696}
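
/* An illustrative worked example, not part of the compiler: M & -M
   isolates the lowest set bit of M, so its log2 is the field position;
   (M >> POS) + 1 must then be a power of two whose log2 is the field
   length.  For M == 0x0f0, M & -M == 0x010, giving POS == 4, and
   (0x0f0 >> 4) + 1 == 0x10, giving LEN == 4.  */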
6697
6698/* See if X can be simplified knowing that we will only refer to it in
6699   MODE and will only refer to those bits that are nonzero in MASK.
6700   If other bits are being computed or if masking operations are done
6701   that select a superset of the bits in MASK, they can sometimes be
6702   ignored.
6703
6704   Return a possibly simplified expression, but always convert X to
6705   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6706
6707   Also, if REG is nonzero and X is a register equal in value to REG,
6708   replace X with REG.
6709
6710   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6711   are all off in X.  This is used when X will be complemented, by either
6712   NOT, NEG, or XOR.  */
6713
6714static rtx
6715force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
6716	       rtx reg, int just_select)
6717{
6718  enum rtx_code code = GET_CODE (x);
6719  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6720  enum machine_mode op_mode;
6721  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6722  rtx op0, op1, temp;
6723
6724  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6725     code below will do the wrong thing since the mode of such an
6726     expression is VOIDmode.
6727
6728     Also do nothing if X is a CLOBBER; this can happen if X was
6729     the return value from a call to gen_lowpart_for_combine.  */
6730  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6731    return x;
6732
6733  /* We want to perform the operation in its present mode unless we know
6734     that the operation is valid in MODE, in which case we do the operation
6735     in MODE.  */
6736  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6737	      && have_insn_for (code, mode))
6738	     ? mode : GET_MODE (x));
6739
6740  /* It is not valid to do a right-shift in a narrower mode
6741     than the one it came in with.  */
6742  if ((code == LSHIFTRT || code == ASHIFTRT)
6743      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6744    op_mode = GET_MODE (x);
6745
6746  /* Truncate MASK to fit OP_MODE.  */
6747  if (op_mode)
6748    mask &= GET_MODE_MASK (op_mode);
6749
6750  /* When we have an arithmetic operation, or a shift whose count we
6751     do not know, we need to assume that all bits up to the highest-order
6752     bit in MASK will be needed.  This is how we form such a mask.  */
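  /* For example, a MASK of 0x28 has its highest set bit at bit 5, so
     FULLER_MASK becomes 0x3f: carries into bits 3 and 5 can only
     originate at or below bit 5.  */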
6753  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
6754    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
6755  else
6756    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6757		   - 1);
6758
6759  /* Determine what bits of X are guaranteed to be (non)zero.  */
6760  nonzero = nonzero_bits (x, mode);
6761
6762  /* If none of the bits in X are needed, return a zero.  */
6763  if (! just_select && (nonzero & mask) == 0)
6764    x = const0_rtx;
6765
6766  /* If X is a CONST_INT, return a new one.  Do this here since the
6767     test below will fail.  */
6768  if (GET_CODE (x) == CONST_INT)
6769    {
6770      if (SCALAR_INT_MODE_P (mode))
6771        return gen_int_mode (INTVAL (x) & mask, mode);
6772      else
6773	{
6774	  x = GEN_INT (INTVAL (x) & mask);
6775	  return gen_lowpart_common (mode, x);
6776	}
6777    }
6778
6779  /* If X is narrower than MODE and we want all the bits in X's mode, just
6780     get X in the proper mode.  */
6781  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6782      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6783    return gen_lowpart_for_combine (mode, x);
6784
6785  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6786     MASK are already known to be zero in X, we need not do anything.  */
6787  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6788    return x;
6789
6790  switch (code)
6791    {
6792    case CLOBBER:
6793      /* If X is a (clobber (const_int)), return it since we know we are
6794	 generating something that won't match.  */
6795      return x;
6796
6797    case USE:
6798      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6799	 spanned the boundary of the MEM.  If we are now masking so it is
6800	 within that boundary, we don't need the USE any more.  */
6801      if (! BITS_BIG_ENDIAN
6802	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6803	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6804      break;
6805
6806    case SIGN_EXTEND:
6807    case ZERO_EXTEND:
6808    case ZERO_EXTRACT:
6809    case SIGN_EXTRACT:
6810      x = expand_compound_operation (x);
6811      if (GET_CODE (x) != code)
6812	return force_to_mode (x, mode, mask, reg, next_select);
6813      break;
6814
6815    case REG:
6816      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6817		       || rtx_equal_p (reg, get_last_value (x))))
6818	x = reg;
6819      break;
6820
6821    case SUBREG:
6822      if (subreg_lowpart_p (x)
6823	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6824	     if the constant masks to zero all the bits the mode doesn't
6825	     have.  */
6826	  && ((GET_MODE_SIZE (GET_MODE (x))
6827	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6828	      || (0 == (mask
6829			& GET_MODE_MASK (GET_MODE (x))
6830			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6831	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6832      break;
6833
6834    case AND:
6835      /* If this is an AND with a constant, convert it into an AND
6836	 whose constant is the AND of that constant with MASK.  If it
6837	 remains an AND of MASK, delete it since it is redundant.  */
6838
6839      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6840	{
6841	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6842				      mask & INTVAL (XEXP (x, 1)));
6843
6844	  /* If X is still an AND, see if it is an AND with a mask that
6845	     is just some low-order bits.  If so, and it is MASK, we don't
6846	     need it.  */
6847
6848	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6849	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6850		  == mask))
6851	    x = XEXP (x, 0);
6852
6853	  /* If it remains an AND, try making another AND with the bits
6854	     in the mode mask that aren't in MASK turned on.  If the
6855	     constant in the AND is wide enough, this might make a
6856	     cheaper constant.  */
6857
6858	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6859	      && GET_MODE_MASK (GET_MODE (x)) != mask
6860	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6861	    {
6862	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6863				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6864	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6865	      rtx y;
6866
6867	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6868		 number, sign extend it.  */
6869	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6870		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6871		cval |= (HOST_WIDE_INT) -1 << width;
6872
6873	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6874	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6875		x = y;
6876	    }
6877
6878	  break;
6879	}
6880
6881      goto binop;
6882
6883    case PLUS:
6884      /* In (and (plus FOO C1) M), if M is a mask that just turns off
6885	 low-order bits (as in an alignment operation) and FOO is already
6886	 aligned to that boundary, mask C1 to that boundary as well.
6887	 This may eliminate that PLUS and, later, the AND.  */
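      /* For example, if FOO is known to be 8-byte aligned, then in
	 (and (plus FOO 13) -8) the 13 can be masked down to 8: the low
	 three bits of the sum come only from the constant, and the AND
	 discards them anyway.  */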
6888
6889      {
6890	unsigned int width = GET_MODE_BITSIZE (mode);
6891	unsigned HOST_WIDE_INT smask = mask;
6892
6893	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6894	   number, sign extend it.  */
6895
6896	if (width < HOST_BITS_PER_WIDE_INT
6897	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6898	  smask |= (HOST_WIDE_INT) -1 << width;
6899
6900	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6901	    && exact_log2 (- smask) >= 0
6902	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6903	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6904	  return force_to_mode (plus_constant (XEXP (x, 0),
6905					       (INTVAL (XEXP (x, 1)) & smask)),
6906				mode, smask, reg, next_select);
6907      }
6908
6909      /* ... fall through ...  */
6910
6911    case MULT:
6912      /* For PLUS, MINUS and MULT, we need any bits less significant than the
6913	 most significant bit in MASK since carries from those bits will
6914	 affect the bits we are interested in.  */
6915      mask = fuller_mask;
6916      goto binop;
6917
6918    case MINUS:
6919      /* If X is (minus C Y) where C's least set bit is larger than any bit
6920	 in the mask, then we may replace with (neg Y).  */
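      /* For example, under a mask of 15, (minus 16 Y) becomes (neg Y),
	 since 16 - Y and -Y agree modulo 16.  */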
6921      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6922	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6923					& -INTVAL (XEXP (x, 0))))
6924	      > mask))
6925	{
6926	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6927				  GET_MODE (x));
6928	  return force_to_mode (x, mode, mask, reg, next_select);
6929	}
6930
6931      /* Similarly, if C contains every bit in the fuller_mask, then we may
6932	 replace with (not Y).  */
6933      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6934	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
6935	      == INTVAL (XEXP (x, 0))))
6936	{
6937	  x = simplify_gen_unary (NOT, GET_MODE (x),
6938				  XEXP (x, 1), GET_MODE (x));
6939	  return force_to_mode (x, mode, mask, reg, next_select);
6940	}
6941
6942      mask = fuller_mask;
6943      goto binop;
6944
6945    case IOR:
6946    case XOR:
6947      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6948	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6949	 operation which may be a bitfield extraction.  Ensure that the
6950	 constant we form is not wider than the mode of X.  */
6951
6952      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6953	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6954	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6955	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6956	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6957	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
6958	       + floor_log2 (INTVAL (XEXP (x, 1))))
6959	      < GET_MODE_BITSIZE (GET_MODE (x)))
6960	  && (INTVAL (XEXP (x, 1))
6961	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6962	{
6963	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6964			  << INTVAL (XEXP (XEXP (x, 0), 1)));
6965	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
6966			     XEXP (XEXP (x, 0), 0), temp);
6967	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6968			  XEXP (XEXP (x, 0), 1));
6969	  return force_to_mode (x, mode, mask, reg, next_select);
6970	}
6971
6972    binop:
6973      /* For most binary operations, just propagate into the operation and
6974	 change the mode if we have an operation of that mode.  */
6975
6976      op0 = gen_lowpart_for_combine (op_mode,
6977				     force_to_mode (XEXP (x, 0), mode, mask,
6978						    reg, next_select));
6979      op1 = gen_lowpart_for_combine (op_mode,
6980				     force_to_mode (XEXP (x, 1), mode, mask,
6981						    reg, next_select));
6982
6983      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6984	x = gen_binary (code, op_mode, op0, op1);
6985      break;
6986
6987    case ASHIFT:
6988      /* For left shifts, do the same, but just for the first operand.
6989	 However, we cannot do anything with shifts where we cannot
6990	 guarantee that the counts are smaller than the size of the mode
6991	 because such a count will have a different meaning in a
6992	 wider mode.  */
6993
6994      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6995	     && INTVAL (XEXP (x, 1)) >= 0
6996	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6997	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6998		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6999		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7000	break;
7001
7002      /* If the shift count is a constant and we can do arithmetic in
7003	 the mode of the shift, refine which bits we need.  Otherwise, use the
7004	 conservative form of the mask.  */
7005      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7006	  && INTVAL (XEXP (x, 1)) >= 0
7007	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7008	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7009	mask >>= INTVAL (XEXP (x, 1));
7010      else
7011	mask = fuller_mask;
7012
7013      op0 = gen_lowpart_for_combine (op_mode,
7014				     force_to_mode (XEXP (x, 0), op_mode,
7015						    mask, reg, next_select));
7016
7017      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7018	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7019      break;
7020
7021    case LSHIFTRT:
7022      /* Here we can only do something if the shift count is a constant,
7023	 this shift constant is valid for the host, and we can do arithmetic
7024	 in OP_MODE.  */
7025
7026      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7027	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7028	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7029	{
7030	  rtx inner = XEXP (x, 0);
7031	  unsigned HOST_WIDE_INT inner_mask;
7032
7033	  /* Select the mask of the bits we need for the shift operand.  */
7034	  inner_mask = mask << INTVAL (XEXP (x, 1));
7035
7036	  /* We can only change the mode of the shift if we can do arithmetic
7037	     in the mode of the shift and INNER_MASK is no wider than the
7038	     width of OP_MODE.  */
7039	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7040	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7041	    op_mode = GET_MODE (x);
7042
7043	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7044
7045	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7046	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7047	}
7048
7049      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7050	 shift and AND produces only copies of the sign bit (C2 is one less
7051	 than a power of two), we can do this with just a shift.  */
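      /* For instance, if FOO has 30 sign-bit copies in SImode, every
	 bit surviving (and (lshiftrt FOO 28) 7) is a copy of the sign
	 bit, so the whole expression is just (lshiftrt FOO 29).  */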
7052
7053      if (GET_CODE (x) == LSHIFTRT
7054	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7055	  /* The shift puts one of the sign bit copies in the least significant
7056	     bit.  */
7057	  && ((INTVAL (XEXP (x, 1))
7058	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7059	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7060	  && exact_log2 (mask + 1) >= 0
7061	  /* Number of bits left after the shift must be more than the mask
7062	     needs.  */
7063	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7064	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7065	  /* Must be more sign bit copies than the mask needs.  */
7066	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7067	      >= exact_log2 (mask + 1)))
7068	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7069			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7070				 - exact_log2 (mask + 1)));
7071
7072      goto shiftrt;
7073
7074    case ASHIFTRT:
7075      /* If we are just looking for the sign bit, we don't need this shift at
7076	 all, even if it has a variable count.  */
7077      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7078	  && (mask == ((unsigned HOST_WIDE_INT) 1
7079		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7080	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7081
7082      /* If this is a shift by a constant, get a mask that contains those bits
7083	 that are not copies of the sign bit.  We then have two cases:  If
7084	 MASK only includes those bits, this can be a logical shift, which may
7085	 allow simplifications.  If MASK is a single-bit field not within
7086	 those bits, we are requesting a copy of the sign bit and hence can
7087	 shift the sign bit to the appropriate location.  */
7088
7089      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7090	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7091	{
7092	  int i = -1;
7093
7094	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7095	     represent a mask for all its bits in a single scalar.
7096	     But we only care about the lower bits, so calculate these.  */
7097
7098	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7099	    {
7100	      nonzero = ~(HOST_WIDE_INT) 0;
7101
7102	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7103		 is the number of bits a full-width mask would have set.
7104		 We need only shift if these are fewer than nonzero can
7105		 hold.  If not, we must keep all bits set in nonzero.  */
7106
7107	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7108		  < HOST_BITS_PER_WIDE_INT)
7109		nonzero >>= INTVAL (XEXP (x, 1))
7110			    + HOST_BITS_PER_WIDE_INT
7111			    - GET_MODE_BITSIZE (GET_MODE (x));
7112	    }
7113	  else
7114	    {
7115	      nonzero = GET_MODE_MASK (GET_MODE (x));
7116	      nonzero >>= INTVAL (XEXP (x, 1));
7117	    }
7118
7119	  if ((mask & ~nonzero) == 0
7120	      || (i = exact_log2 (mask)) >= 0)
7121	    {
7122	      x = simplify_shift_const
7123		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7124		 i < 0 ? INTVAL (XEXP (x, 1))
7125		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7126
7127	      if (GET_CODE (x) != ASHIFTRT)
7128		return force_to_mode (x, mode, mask, reg, next_select);
7129	    }
7130	}
7131
7132      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7133	 even if the shift count isn't a constant.  */
7134      if (mask == 1)
7135	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7136
7137    shiftrt:
7138
7139      /* If this is a zero- or sign-extension operation that just affects bits
7140	 we don't care about, remove it.  Be sure the call above returned
7141	 something that is still a shift.  */
7142
7143      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7144	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7145	  && INTVAL (XEXP (x, 1)) >= 0
7146	  && (INTVAL (XEXP (x, 1))
7147	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7148	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7149	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7150	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7151			      reg, next_select);
7152
7153      break;
7154
7155    case ROTATE:
7156    case ROTATERT:
7157      /* If the shift count is constant and we can do computations
7158	 in the mode of X, compute where the bits we care about are.
7159	 Otherwise, we can't do anything.  Don't change the mode of
7160	 the shift or propagate MODE into the shift, though.  */
7161      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7162	  && INTVAL (XEXP (x, 1)) >= 0)
7163	{
7164	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7165					    GET_MODE (x), GEN_INT (mask),
7166					    XEXP (x, 1));
7167	  if (temp && GET_CODE (temp) == CONST_INT)
7168	    SUBST (XEXP (x, 0),
7169		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7170				  INTVAL (temp), reg, next_select));
7171	}
7172      break;
7173
7174    case NEG:
7175      /* If we just want the low-order bit, the NEG isn't needed since it
7176	 won't change the low-order bit.  */
7177      if (mask == 1)
7178	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7179
7180      /* We need any bits less significant than the most significant bit in
7181	 MASK since carries from those bits will affect the bits we are
7182	 interested in.  */
7183      mask = fuller_mask;
7184      goto unop;
7185
7186    case NOT:
7187      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7188	 same as the XOR case above.  Ensure that the constant we form is not
7189	 wider than the mode of X.  */
7190
7191      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7192	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7193	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7194	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7195	      < GET_MODE_BITSIZE (GET_MODE (x)))
7196	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7197	{
7198	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7199			       GET_MODE (x));
7200	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7201	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7202
7203	  return force_to_mode (x, mode, mask, reg, next_select);
7204	}
7205
7206      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7207	 use the full mask inside the NOT.  */
7208      mask = fuller_mask;
7209
7210    unop:
7211      op0 = gen_lowpart_for_combine (op_mode,
7212				     force_to_mode (XEXP (x, 0), mode, mask,
7213						    reg, next_select));
7214      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7215	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7216      break;
7217
7218    case NE:
7219      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7220	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7221	 which is equal to STORE_FLAG_VALUE.  */
7222      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7223	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7224	  && (nonzero_bits (XEXP (x, 0), mode)
7225	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7226	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7227
7228      break;
7229
7230    case IF_THEN_ELSE:
7231      /* We have no way of knowing if the IF_THEN_ELSE can itself be
7232	 written in a narrower mode.  We play it safe and do not do so.  */
7233
7234      SUBST (XEXP (x, 1),
7235	     gen_lowpart_for_combine (GET_MODE (x),
7236				      force_to_mode (XEXP (x, 1), mode,
7237						     mask, reg, next_select)));
7238      SUBST (XEXP (x, 2),
7239	     gen_lowpart_for_combine (GET_MODE (x),
7240				      force_to_mode (XEXP (x, 2), mode,
7241						     mask, reg, next_select)));
7242      break;
7243
7244    default:
7245      break;
7246    }
7247
7248  /* Ensure we return a value of the proper mode.  */
7249  return gen_lowpart_for_combine (mode, x);
7250}
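
/* An illustrative sketch, not part of the compiler: the kind of fact
   force_to_mode exploits.  If callers will only ever look at the bits
   in MASK, intermediate operations on wider bits are redundant; e.g.
   when only the low byte is live, both functions below agree:
   ((x & 0xffff) | y) & 0xff == (x | y) & 0xff.  */

static unsigned int
masked_wide_sketch (unsigned int x, unsigned int y)
{
  return ((x & 0xffff) | y) & 0xff;	/* inner AND is dead under 0xff */
}

static unsigned int
masked_narrow_sketch (unsigned int x, unsigned int y)
{
  return (x | y) & 0xff;
}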
7251
7252/* Return nonzero if X is an expression that has one of two values depending on
7253   whether some other value is zero or nonzero.  In that case, we return the
7254   value that is being tested, *PTRUE is set to the value if the rtx being
7255   returned has a nonzero value, and *PFALSE is set to the other alternative.
7256
7257   If we return zero, we set *PTRUE and *PFALSE to X.  */
7258
7259static rtx
7260if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7261{
7262  enum machine_mode mode = GET_MODE (x);
7263  enum rtx_code code = GET_CODE (x);
7264  rtx cond0, cond1, true0, true1, false0, false1;
7265  unsigned HOST_WIDE_INT nz;
7266
7267  /* If we are comparing a value against zero, we are done.  */
7268  if ((code == NE || code == EQ)
7269      && XEXP (x, 1) == const0_rtx)
7270    {
7271      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7272      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7273      return XEXP (x, 0);
7274    }
7275
7276  /* If this is a unary operation whose operand has one of two values, apply
7277     our opcode to compute those values.  */
7278  else if (GET_RTX_CLASS (code) == '1'
7279	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7280    {
7281      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7282      *pfalse = simplify_gen_unary (code, mode, false0,
7283				    GET_MODE (XEXP (x, 0)));
7284      return cond0;
7285    }
7286
7287  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7288     make can't possibly match and would suppress other optimizations.  */
7289  else if (code == COMPARE)
7290    ;
7291
7292  /* If this is a binary operation, see if either side has only one of two
7293     values.  If either one does or if both do and they are conditional on
7294     the same value, compute the new true and false values.  */
7295  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7296	   || GET_RTX_CLASS (code) == '<')
7297    {
7298      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7299      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7300
7301      if ((cond0 != 0 || cond1 != 0)
7302	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7303	{
7304	  /* If if_then_else_cond returned zero, then true/false are the
7305	     same rtl.  We must copy one of them to prevent invalid rtl
7306	     sharing.  */
7307	  if (cond0 == 0)
7308	    true0 = copy_rtx (true0);
7309	  else if (cond1 == 0)
7310	    true1 = copy_rtx (true1);
7311
7312	  *ptrue = gen_binary (code, mode, true0, true1);
7313	  *pfalse = gen_binary (code, mode, false0, false1);
7314	  return cond0 ? cond0 : cond1;
7315	}
7316
7317      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7318	 operands is zero when the other is nonzero, and vice-versa,
7319	 and STORE_FLAG_VALUE is 1 or -1.  */
7320
7321      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7322	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7323	      || code == UMAX)
7324	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7325	{
7326	  rtx op0 = XEXP (XEXP (x, 0), 1);
7327	  rtx op1 = XEXP (XEXP (x, 1), 1);
7328
7329	  cond0 = XEXP (XEXP (x, 0), 0);
7330	  cond1 = XEXP (XEXP (x, 1), 0);
7331
7332	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7333	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7334	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7335		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7336		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7337		  || ((swap_condition (GET_CODE (cond0))
7338		       == combine_reversed_comparison_code (cond1))
7339		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7340		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7341	      && ! side_effects_p (x))
7342	    {
7343	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7344	      *pfalse = gen_binary (MULT, mode,
7345				    (code == MINUS
7346				     ? simplify_gen_unary (NEG, mode, op1,
7347							   mode)
7348				     : op1),
7349				    const_true_rtx);
7350	      return cond0;
7351	    }
7352	}
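
      /* A worked example of the test above (assuming STORE_FLAG_VALUE == 1,
	 with A, B, C and D standing for arbitrary rtx):  given
	   (plus (mult (eq A B) C) (mult (ne A B) D))
	 the two conditions are reversals of each other, so we return
	 (eq A B) with *PTRUE = (mult C (const_int 1)) and
	 *PFALSE = (mult D (const_int 1)); only one product survives for
	 either outcome of the comparison.  */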
7353
7354      /* Similarly for MULT, AND and UMIN, except that for these the result
7355	 is always zero.  */
7356      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7357	  && (code == MULT || code == AND || code == UMIN)
7358	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7359	{
7360	  cond0 = XEXP (XEXP (x, 0), 0);
7361	  cond1 = XEXP (XEXP (x, 1), 0);
7362
7363	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7364	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7365	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7366		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7367		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7368		  || ((swap_condition (GET_CODE (cond0))
7369		       == combine_reversed_comparison_code (cond1))
7370		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7371		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7372	      && ! side_effects_p (x))
7373	    {
7374	      *ptrue = *pfalse = const0_rtx;
7375	      return cond0;
7376	    }
7377	}
7378    }
7379
7380  else if (code == IF_THEN_ELSE)
7381    {
7382      /* If we have IF_THEN_ELSE already, extract the condition and
7383	 canonicalize it if it is NE or EQ.  */
7384      cond0 = XEXP (x, 0);
7385      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7386      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7387	return XEXP (cond0, 0);
7388      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7389	{
7390	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7391	  return XEXP (cond0, 0);
7392	}
7393      else
7394	return cond0;
7395    }
7396
7397  /* If X is a SUBREG, we can narrow both the true and false values
7398     of the inner expression, if there is a condition.  */
7399  else if (code == SUBREG
7400	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7401					       &true0, &false0)))
7402    {
7403      true0 = simplify_gen_subreg (mode, true0,
7404				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7405      false0 = simplify_gen_subreg (mode, false0,
7406				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7407      if (true0 && false0)
7408	{
7409	  *ptrue = true0;
7410	  *pfalse = false0;
7411	  return cond0;
7412	}
7413    }
7414
7415  /* If X is a constant, this isn't special and will cause confusion
7416     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7417  else if (CONSTANT_P (x)
7418	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7419    ;
7420
7421  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7422     will be least confusing to the rest of the compiler.  */
7423  else if (mode == BImode)
7424    {
7425      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7426      return x;
7427    }
7428
7429  /* If X is known to be either 0 or -1, those are the true and
7430     false values when testing X.  */
7431  else if (x == constm1_rtx || x == const0_rtx
7432	   || (mode != VOIDmode
7433	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7434    {
7435      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7436      return x;
7437    }
7438
7439  /* Likewise for 0 or a single bit.  */
7440  else if (SCALAR_INT_MODE_P (mode)
7441	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7442	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7443    {
7444      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7445      return x;
7446    }
7447
7448  /* Otherwise fail; show no condition with true and false values the same.  */
7449  *ptrue = *pfalse = x;
7450  return 0;
7451}
7452
7453/* Return the value of expression X given the fact that condition COND
7454   is known to be true when applied to REG as its first operand and VAL
7455   as its second.  X is known to not be shared and so can be modified in
7456   place.
7457
7458   We only handle the simplest cases, and specifically those cases that
7459   arise with IF_THEN_ELSE expressions.  */
7460
7461static rtx
7462known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7463{
7464  enum rtx_code code = GET_CODE (x);
7465  rtx temp;
7466  const char *fmt;
7467  int i, j;
7468
7469  if (side_effects_p (x))
7470    return x;
7471
7472  /* If either operand of the condition is a floating point value,
7473     then we have to avoid collapsing an EQ comparison.  */
7474  if (cond == EQ
7475      && rtx_equal_p (x, reg)
7476      && ! FLOAT_MODE_P (GET_MODE (x))
7477      && ! FLOAT_MODE_P (GET_MODE (val)))
7478    return val;
7479
7480  if (cond == UNEQ && rtx_equal_p (x, reg))
7481    return val;
7482
7483  /* If X is (abs REG) and we know something about REG's relationship
7484     with zero, we may be able to simplify this.  */
7485
7486  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7487    switch (cond)
7488      {
7489      case GE:  case GT:  case EQ:
7490	return XEXP (x, 0);
7491      case LT:  case LE:
7492	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7493				   XEXP (x, 0),
7494				   GET_MODE (XEXP (x, 0)));
7495      default:
7496	break;
7497      }
7498
7499  /* The only other cases we handle are MIN, MAX, and comparisons if the
7500     operands are the same as REG and VAL.  */
7501
7502  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7503    {
7504      if (rtx_equal_p (XEXP (x, 0), val))
7505	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7506
7507      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7508	{
7509	  if (GET_RTX_CLASS (code) == '<')
7510	    {
7511	      if (comparison_dominates_p (cond, code))
7512		return const_true_rtx;
7513
7514	      code = combine_reversed_comparison_code (x);
7515	      if (code != UNKNOWN
7516		  && comparison_dominates_p (cond, code))
7517		return const0_rtx;
7518	      else
7519		return x;
7520	    }
7521	  else if (code == SMAX || code == SMIN
7522		   || code == UMIN || code == UMAX)
7523	    {
7524	      int unsignedp = (code == UMIN || code == UMAX);
7525
7526	      /* Do not reverse the condition when it is NE or EQ.
7527		 This is because we cannot conclude anything about
7528		 the value of 'SMAX (x, y)' when x is not equal to y,
7529		 but we can when x equals y.  */
7530	      if ((code == SMAX || code == UMAX)
7531		  && ! (cond == EQ || cond == NE))
7532		cond = reverse_condition (cond);
7533
7534	      switch (cond)
7535		{
7536		case GE:   case GT:
7537		  return unsignedp ? x : XEXP (x, 1);
7538		case LE:   case LT:
7539		  return unsignedp ? x : XEXP (x, 0);
7540		case GEU:  case GTU:
7541		  return unsignedp ? XEXP (x, 1) : x;
7542		case LEU:  case LTU:
7543		  return unsignedp ? XEXP (x, 0) : x;
7544		default:
7545		  break;
7546		}
7547	    }
7548	}
7549    }
7550  else if (code == SUBREG)
7551    {
7552      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7553      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7554
7555      if (SUBREG_REG (x) != r)
7556	{
7557	  /* We must simplify subreg here, before we lose track of the
7558	     original inner_mode.  */
7559	  new = simplify_subreg (GET_MODE (x), r,
7560				 inner_mode, SUBREG_BYTE (x));
7561	  if (new)
7562	    return new;
7563	  else
7564	    SUBST (SUBREG_REG (x), r);
7565	}
7566
7567      return x;
7568    }
7569  /* We don't have to handle SIGN_EXTEND here, because even in the
7570     case of replacing something with a modeless CONST_INT, a
7571     CONST_INT is already (supposed to be) a valid sign extension for
7572     its narrower mode, which implies it's already properly
7573     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7574     story is different.  */
7575  else if (code == ZERO_EXTEND)
7576    {
7577      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7578      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7579
7580      if (XEXP (x, 0) != r)
7581	{
7582	  /* We must simplify the zero_extend here, before we lose
7583             track of the original inner_mode.  */
7584	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7585					  r, inner_mode);
7586	  if (new)
7587	    return new;
7588	  else
7589	    SUBST (XEXP (x, 0), r);
7590	}
7591
7592      return x;
7593    }
7594
7595  fmt = GET_RTX_FORMAT (code);
7596  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7597    {
7598      if (fmt[i] == 'e')
7599	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7600      else if (fmt[i] == 'E')
7601	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7602	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7603						cond, reg, val));
7604    }
7605
7606  return x;
7607}
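
/* Worked examples for known_cond (illustration only):  if COND is GE with
   operands (reg A) and (reg B), then
     (smax (reg A) (reg B))  simplifies to (reg A), and
     (ge (reg A) (reg B))    simplifies to const_true_rtx,
   since GE dominates itself and the signed maximum of A and B is A
   whenever A >= B.  */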
7608
7609/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7610   assignment as a field assignment.  */
7611
7612static int
7613rtx_equal_for_field_assignment_p (rtx x, rtx y)
7614{
7615  if (x == y || rtx_equal_p (x, y))
7616    return 1;
7617
7618  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7619    return 0;
7620
7621  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7622     Note that all SUBREGs of MEM are paradoxical; otherwise they
7623     would have been rewritten.  */
7624  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7625      && GET_CODE (SUBREG_REG (y)) == MEM
7626      && rtx_equal_p (SUBREG_REG (y),
7627		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7628    return 1;
7629
7630  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7631      && GET_CODE (SUBREG_REG (x)) == MEM
7632      && rtx_equal_p (SUBREG_REG (x),
7633		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7634    return 1;
7635
7636  /* We used to see if get_last_value of X and Y were the same but that's
7637     not correct.  In one direction, we'll cause the assignment to have
7638     the wrong destination and in the other case, we'll import a register
7639     into this insn that might already have been dead.  So fail if none of
7640     the above cases are true.  */
7641  return 0;
7642}
7643
7644/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7645   Return that assignment if so.
7646
7647   We only handle the most common cases.  */
7648
7649static rtx
7650make_field_assignment (rtx x)
7651{
7652  rtx dest = SET_DEST (x);
7653  rtx src = SET_SRC (x);
7654  rtx assign;
7655  rtx rhs, lhs;
7656  HOST_WIDE_INT c1;
7657  HOST_WIDE_INT pos;
7658  unsigned HOST_WIDE_INT len;
7659  rtx other;
7660  enum machine_mode mode;
7661
7662  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7663     a clear of a one-bit field.  We will have changed it to
7664     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7665     for a SUBREG.  */
7666
7667  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7668      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7669      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7670      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7671    {
7672      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7673				1, 1, 1, 0);
7674      if (assign != 0)
7675	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7676      return x;
7677    }
7678
7679  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7680	   && subreg_lowpart_p (XEXP (src, 0))
7681	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7682	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7683	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7684	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7685	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7686	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7687    {
7688      assign = make_extraction (VOIDmode, dest, 0,
7689				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7690				1, 1, 1, 0);
7691      if (assign != 0)
7692	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7693      return x;
7694    }
7695
7696  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7697     one-bit field.  */
7698  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7699	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7700	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7701    {
7702      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7703				1, 1, 1, 0);
7704      if (assign != 0)
7705	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7706      return x;
7707    }
7708
7709  /* The other case we handle is assignments into a constant-position
7710     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7711     a mask that has all one bits except for a group of zero bits and
7712     OTHER is known to have zeros where C1 has ones, this is such an
7713     assignment.  Compute the position and length from C1.  Shift OTHER
7714     to the appropriate position, force it to the required mode, and
7715     make the extraction.  Check for the AND in both operands.  */
7716
7717  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7718    return x;
7719
7720  rhs = expand_compound_operation (XEXP (src, 0));
7721  lhs = expand_compound_operation (XEXP (src, 1));
7722
7723  if (GET_CODE (rhs) == AND
7724      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7725      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7726    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7727  else if (GET_CODE (lhs) == AND
7728	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7729	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7730    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7731  else
7732    return x;
7733
7734  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7735  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7736      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7737      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7738    return x;
7739
7740  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7741  if (assign == 0)
7742    return x;
7743
7744  /* The mode to use for the source is the mode of the assignment, or of
7745     what is inside a possible STRICT_LOW_PART.  */
7746  mode = (GET_CODE (assign) == STRICT_LOW_PART
7747	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7748
7749  /* Shift OTHER right POS places and make it the source, restricting it
7750     to the proper length and mode.  */
7751
7752  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7753					     GET_MODE (src), other, pos),
7754		       mode,
7755		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7756		       ? ~(unsigned HOST_WIDE_INT) 0
7757		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7758		       dest, 0);
7759
7760  /* If SRC is masked by an AND that does not make a difference in
7761     the value being stored, strip it.  */
7762  if (GET_CODE (assign) == ZERO_EXTRACT
7763      && GET_CODE (XEXP (assign, 1)) == CONST_INT
7764      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
7765      && GET_CODE (src) == AND
7766      && GET_CODE (XEXP (src, 1)) == CONST_INT
7767      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
7768	  == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
7769    src = XEXP (src, 0);
7770
7771  return gen_rtx_SET (VOIDmode, assign, src);
7772}
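
/* A worked example for the constant-position case above (illustration
   only):

     (set (reg:SI D) (ior (and (reg:SI D) (const_int -256)) (const_int 66)))

   has C1 == 0xffffff00, giving POS == 0 and LEN == 8, and 66 has no bits
   in common with C1; the result assigns 66 to the eight-bit field of D at
   bit 0, via whatever extraction rtx make_extraction produces for the
   target (a ZERO_EXTRACT or a STRICT_LOW_PART).  */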
7773
7774/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7775   if so.  */
7776
7777static rtx
7778apply_distributive_law (rtx x)
7779{
7780  enum rtx_code code = GET_CODE (x);
7781  enum rtx_code inner_code;
7782  rtx lhs, rhs, other;
7783  rtx tem;
7784
7785  /* Distributivity is not true for floating point as it can change the
7786     value.  So we don't do it unless -funsafe-math-optimizations.  */
7787  if (FLOAT_MODE_P (GET_MODE (x))
7788      && ! flag_unsafe_math_optimizations)
7789    return x;
7790
7791  /* The outer operation can only be one of the following:  */
7792  if (code != IOR && code != AND && code != XOR
7793      && code != PLUS && code != MINUS)
7794    return x;
7795
7796  lhs = XEXP (x, 0);
7797  rhs = XEXP (x, 1);
7798
7799  /* If either operand is a primitive we can't do anything, so get out
7800     fast.  */
7801  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7802      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7803    return x;
7804
7805  lhs = expand_compound_operation (lhs);
7806  rhs = expand_compound_operation (rhs);
7807  inner_code = GET_CODE (lhs);
7808  if (inner_code != GET_CODE (rhs))
7809    return x;
7810
7811  /* See if the inner and outer operations distribute.  */
7812  switch (inner_code)
7813    {
7814    case LSHIFTRT:
7815    case ASHIFTRT:
7816    case AND:
7817    case IOR:
7818      /* These all distribute except over PLUS.  */
7819      if (code == PLUS || code == MINUS)
7820	return x;
7821      break;
7822
7823    case MULT:
7824      if (code != PLUS && code != MINUS)
7825	return x;
7826      break;
7827
7828    case ASHIFT:
7829      /* This is also a multiply, so it distributes over everything.  */
7830      break;
7831
7832    case SUBREG:
7833      /* Non-paradoxical SUBREGs distribute over all operations, provided
7834	 the inner modes and byte offsets are the same, this is an extraction
7835	 of a low-order part, we don't convert an fp operation to int or
7836	 vice versa, and we would not be converting a single-word
7837	 operation into a multi-word operation.  The latter test is not
7838	 required, but it prevents generating unneeded multi-word operations.
7839	 Some of the previous tests are redundant given the latter test, but
7840	 are retained because they are required for correctness.
7841
7842	 We produce the result slightly differently in this case.  */
7843
7844      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7845	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7846	  || ! subreg_lowpart_p (lhs)
7847	  || (GET_MODE_CLASS (GET_MODE (lhs))
7848	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7849	  || (GET_MODE_SIZE (GET_MODE (lhs))
7850	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7851	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7852	return x;
7853
7854      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7855			SUBREG_REG (lhs), SUBREG_REG (rhs));
7856      return gen_lowpart_for_combine (GET_MODE (x), tem);
7857
7858    default:
7859      return x;
7860    }
7861
7862  /* Set LHS and RHS to the inner operands (A and B in the example
7863     above) and set OTHER to the common operand (C in the example).
7864     There is only one way to do this unless the inner operation is
7865     commutative.  */
7866  if (GET_RTX_CLASS (inner_code) == 'c'
7867      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7868    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7869  else if (GET_RTX_CLASS (inner_code) == 'c'
7870	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7871    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7872  else if (GET_RTX_CLASS (inner_code) == 'c'
7873	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7874    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7875  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7876    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7877  else
7878    return x;
7879
7880  /* Form the new inner operation, seeing if it simplifies first.  */
7881  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7882
7883  /* There is one exception to the general way of distributing:
7884     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
7885  if (code == XOR && inner_code == IOR)
7886    {
7887      inner_code = AND;
7888      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7889    }
7890
7891  /* We may be able to continue distributing the result, so call
7892     ourselves recursively on the inner operation before forming the
7893     outer operation, which we return.  */
7894  return gen_binary (inner_code, GET_MODE (x),
7895		     apply_distributive_law (tem), other);
7896}
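
/* Illustrations of the transformation above:

     (ior (and A C) (and B C))  ->  (and (ior A B) C)
     (xor (ior A C) (ior B C))  ->  (and (xor A B) (not C))

   the second line being the one exception noted in the code.  */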
7897
7898/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7899   in MODE.
7900
7901   Return an equivalent form, if different from X.  Otherwise, return X.  If
7902   X is zero, we are to always construct the equivalent form.  */
7903
7904static rtx
7905simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
7906			unsigned HOST_WIDE_INT constop)
7907{
7908  unsigned HOST_WIDE_INT nonzero;
7909  int i;
7910
7911  /* Simplify VAROP knowing that we will be only looking at some of the
7912     bits in it.
7913
7914     Note by passing in CONSTOP, we guarantee that the bits not set in
7915     CONSTOP are not significant and will never be examined.  We must
7916     ensure that is the case by explicitly masking out those bits
7917     before returning.  */
7918  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7919
7920  /* If VAROP is a CLOBBER, we will fail so return it.  */
7921  if (GET_CODE (varop) == CLOBBER)
7922    return varop;
7923
7924  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7925     to VAROP and return the new constant.  */
7926  if (GET_CODE (varop) == CONST_INT)
7927    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7928
7929  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7930     a call to nonzero_bits, here we don't care about bits outside
7931     MODE.  */
7932
7933  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7934
7935  /* Turn off all bits in the constant that are known to already be zero.
7936     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7937     which is tested below.  */
7938
7939  constop &= nonzero;
7940
7941  /* If we don't have any bits left, return zero.  */
7942  if (constop == 0)
7943    return const0_rtx;
7944
7945  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7946     a power of two, we can replace this with an ASHIFT.  */
7947  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7948      && (i = exact_log2 (constop)) >= 0)
7949    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7950
7951  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7952     or XOR, then try to apply the distributive law.  This may eliminate
7953     operations if either branch can be simplified because of the AND.
7954     It may also make some cases more complex, but those cases probably
7955     won't match a pattern either with or without this.  */
7956
7957  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7958    return
7959      gen_lowpart_for_combine
7960	(mode,
7961	 apply_distributive_law
7962	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7963		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7964					      XEXP (varop, 0), constop),
7965		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7966					      XEXP (varop, 1), constop))));
7967
7968  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
7969     the AND and see if one of the operands simplifies to zero.  If so, we
7970     may eliminate it.  */
7971
7972  if (GET_CODE (varop) == PLUS
7973      && exact_log2 (constop + 1) >= 0)
7974    {
7975      rtx o0, o1;
7976
7977      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7978      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7979      if (o0 == const0_rtx)
7980	return o1;
7981      if (o1 == const0_rtx)
7982	return o0;
7983    }
7984
7985  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7986     if we already had one (just check for the simplest cases).  */
7987  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7988      && GET_MODE (XEXP (x, 0)) == mode
7989      && SUBREG_REG (XEXP (x, 0)) == varop)
7990    varop = XEXP (x, 0);
7991  else
7992    varop = gen_lowpart_for_combine (mode, varop);
7993
7994  /* If we can't make the SUBREG, try to return what we were given.  */
7995  if (GET_CODE (varop) == CLOBBER)
7996    return x ? x : varop;
7997
7998  /* If we are only masking insignificant bits, return VAROP.  */
7999  if (constop == nonzero)
8000    x = varop;
8001  else
8002    {
8003      /* Otherwise, return an AND.  */
8004      constop = trunc_int_for_mode (constop, mode);
8005      /* See how much, if any, of X we can use.  */
8006      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8007	x = gen_binary (AND, mode, varop, GEN_INT (constop));
8008
8009      else
8010	{
8011	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
8012	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8013	    SUBST (XEXP (x, 1), GEN_INT (constop));
8014
8015	  SUBST (XEXP (x, 0), varop);
8016	}
8017    }
8018
8019  return x;
8020}
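
/* A small worked example (illustration only):  simplifying

     (and:SI (plus:SI (reg X) (const_int 4)) (const_int 3))

   distributes the AND over the PLUS because 3 is a mask of low bits;
   (const_int 4) masked by 3 is zero, so the whole expression collapses to
   (and:SI (reg X) (const_int 3)), assuming nothing further is known about
   the nonzero bits of X.  */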
8021
8022#define nonzero_bits_with_known(X, MODE) \
8023  cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret)
8024
8025/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
8026   It avoids exponential behavior in nonzero_bits1 when X has
8027   identical subexpressions on the first or the second level.  */
8028
8029static unsigned HOST_WIDE_INT
8030cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
8031		     enum machine_mode known_mode,
8032		     unsigned HOST_WIDE_INT known_ret)
8033{
8034  if (x == known_x && mode == known_mode)
8035    return known_ret;
8036
8037  /* Try to find identical subexpressions.  If found call
8038     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
8039     precomputed value for the subexpression as KNOWN_RET.  */
8040
8041  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8042      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8043    {
8044      rtx x0 = XEXP (x, 0);
8045      rtx x1 = XEXP (x, 1);
8046
8047      /* Check the first level.  */
8048      if (x0 == x1)
8049	return nonzero_bits1 (x, mode, x0, mode,
8050			      nonzero_bits_with_known (x0, mode));
8051
8052      /* Check the second level.  */
8053      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8054	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8055	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8056	return nonzero_bits1 (x, mode, x1, mode,
8057			      nonzero_bits_with_known (x1, mode));
8058
8059      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8060	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8061	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8062	return nonzero_bits1 (x, mode, x0, mode,
8063			 nonzero_bits_with_known (x0, mode));
8064    }
8065
8066  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
8067}
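
/* For illustration:  if X is (plus Y Y) where both operands are the
   identical rtx, the wrapper above computes nonzero_bits for Y once and
   hands the result down as KNOWN_RET, so the second occurrence of Y is
   answered in constant time.  Without this, a chain of such shared
   operands would be walked exponentially many times.  */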
8068
8069/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
8070   We don't let nonzero_bits recur into num_sign_bit_copies, because that
8071   is less useful.  We can't allow both, because that results in exponential
8072   run time recursion.  There is a nullstone testcase that triggered
8073   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
8074#define cached_num_sign_bit_copies()
8075
8076/* Given an expression, X, compute which bits in X can be nonzero.
8077   We don't care about bits outside of those defined in MODE.
8078
8079   For most X this is simply GET_MODE_MASK (MODE), but if X is
8080   a shift, AND, or zero_extract, we can do better.  */
8081
8082static unsigned HOST_WIDE_INT
8083nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
8084	       enum machine_mode known_mode,
8085	       unsigned HOST_WIDE_INT known_ret)
8086{
8087  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8088  unsigned HOST_WIDE_INT inner_nz;
8089  enum rtx_code code;
8090  unsigned int mode_width = GET_MODE_BITSIZE (mode);
8091  rtx tem;
8092
8093  /* For floating-point values, assume all bits are needed.  */
8094  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8095    return nonzero;
8096
8097  /* If X is wider than MODE, use its mode instead.  */
8098  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8099    {
8100      mode = GET_MODE (x);
8101      nonzero = GET_MODE_MASK (mode);
8102      mode_width = GET_MODE_BITSIZE (mode);
8103    }
8104
8105  if (mode_width > HOST_BITS_PER_WIDE_INT)
8106    /* Our only callers in this case look for single bit values.  So
8107       just return the mode mask.  Those tests will then be false.  */
8108    return nonzero;
8109
8110#ifndef WORD_REGISTER_OPERATIONS
8111  /* If MODE is wider than X, but both are a single word for both the host
8112     and target machines, we can compute this from which bits of the
8113     object might be nonzero in its own mode, taking into account the fact
8114     that on many CISC machines, accessing an object in a wider mode
8115     causes the high-order bits to become undefined.  So they are
8116     not known to be zero.  */
8117
8118  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8119      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8120      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8121      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8122    {
8123      nonzero &= nonzero_bits_with_known (x, GET_MODE (x));
8124      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8125      return nonzero;
8126    }
8127#endif
8128
8129  code = GET_CODE (x);
8130  switch (code)
8131    {
8132    case REG:
8133#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8134      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8135	 all the bits above ptr_mode are known to be zero.  */
8136      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8137	  && REG_POINTER (x))
8138	nonzero &= GET_MODE_MASK (ptr_mode);
8139#endif
8140
8141      /* Include declared information about alignment of pointers.  */
8142      /* ??? We don't properly preserve REG_POINTER changes across
8143	 pointer-to-integer casts, so we can't trust it except for
8144	 things that we know must be pointers.  See execute/960116-1.c.  */
8145      if ((x == stack_pointer_rtx
8146	   || x == frame_pointer_rtx
8147	   || x == arg_pointer_rtx)
8148	  && REGNO_POINTER_ALIGN (REGNO (x)))
8149	{
8150	  unsigned HOST_WIDE_INT alignment
8151	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8152
8153#ifdef PUSH_ROUNDING
8154	  /* If PUSH_ROUNDING is defined, it is possible for the
8155	     stack to be momentarily aligned only to that amount,
8156	     so we pick the least alignment.  */
8157	  if (x == stack_pointer_rtx && PUSH_ARGS)
8158	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
8159			     alignment);
8160#endif
8161
8162	  nonzero &= ~(alignment - 1);
8163	}
8164
8165      /* If X is a register whose nonzero bits value is current, use it.
8166	 Otherwise, if X is a register whose value we can find, use that
8167	 value.  Otherwise, use the previously-computed global nonzero bits
8168	 for this register.  */
8169
8170      if (reg_last_set_value[REGNO (x)] != 0
8171	  && (reg_last_set_mode[REGNO (x)] == mode
8172	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8173		  && GET_MODE_CLASS (mode) == MODE_INT))
8174	  && (reg_last_set_label[REGNO (x)] == label_tick
8175	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8176		  && REG_N_SETS (REGNO (x)) == 1
8177		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8178					REGNO (x))))
8179	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8180	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8181
8182      tem = get_last_value (x);
8183
8184      if (tem)
8185	{
8186#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8187	  /* If X is narrower than MODE and TEM is a non-negative
8188	     constant that would appear negative in the mode of X,
8189	     sign-extend it for use in reg_nonzero_bits because some
8190	     machines (maybe most) will actually do the sign-extension
8191	     and this is the conservative approach.
8192
8193	     ??? For 2.5, try to tighten up the MD files in this regard
8194	     instead of this kludge.  */
8195
8196	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8197	      && GET_CODE (tem) == CONST_INT
8198	      && INTVAL (tem) > 0
8199	      && 0 != (INTVAL (tem)
8200		       & ((HOST_WIDE_INT) 1
8201			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8202	    tem = GEN_INT (INTVAL (tem)
8203			   | ((HOST_WIDE_INT) (-1)
8204			      << GET_MODE_BITSIZE (GET_MODE (x))));
8205#endif
8206	  return nonzero_bits_with_known (tem, mode) & nonzero;
8207	}
8208      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8209	{
8210	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8211
8212	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8213	    /* We don't know anything about the upper bits.  */
8214	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8215	  return nonzero & mask;
8216	}
8217      else
8218	return nonzero;
8219
8220    case CONST_INT:
8221#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8222      /* If X is negative in MODE, sign-extend the value.  */
8223      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8224	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8225	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8226#endif
8227
8228      return INTVAL (x);
8229
8230    case MEM:
8231#ifdef LOAD_EXTEND_OP
8232      /* On many, if not most, RISC machines, reading a byte from memory
8233	 zeros the rest of the register.  Noticing that fact saves a lot
8234	 of extra zero-extends.  */
8235      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8236	nonzero &= GET_MODE_MASK (GET_MODE (x));
8237#endif
8238      break;
8239
8240    case EQ:  case NE:
8241    case UNEQ:  case LTGT:
8242    case GT:  case GTU:  case UNGT:
8243    case LT:  case LTU:  case UNLT:
8244    case GE:  case GEU:  case UNGE:
8245    case LE:  case LEU:  case UNLE:
8246    case UNORDERED: case ORDERED:
8247
8248      /* If this produces an integer result, we know which bits are set.
8249	 Code here used to clear bits outside the mode of X, but that is
8250	 now done above.  */
8251
8252      if (GET_MODE_CLASS (mode) == MODE_INT
8253	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8254	nonzero = STORE_FLAG_VALUE;
8255      break;
8256
8257    case NEG:
8258#if 0
8259      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8260	 and num_sign_bit_copies.  */
8261      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8262	  == GET_MODE_BITSIZE (GET_MODE (x)))
8263	nonzero = 1;
8264#endif
8265
8266      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8267	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8268      break;
8269
8270    case ABS:
8271#if 0
8272      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8273	 and num_sign_bit_copies.  */
8274      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8275	  == GET_MODE_BITSIZE (GET_MODE (x)))
8276	nonzero = 1;
8277#endif
8278      break;
8279
8280    case TRUNCATE:
8281      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8282		  & GET_MODE_MASK (mode));
8283      break;
8284
8285    case ZERO_EXTEND:
8286      nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode);
8287      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8288	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8289      break;
8290
8291    case SIGN_EXTEND:
8292      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8293	 Otherwise, show all the bits in the outer mode but not the inner
8294	 may be nonzero.  */
8295      inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode);
8296      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8297	{
8298	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8299	  if (inner_nz
8300	      & (((HOST_WIDE_INT) 1
8301		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8302	    inner_nz |= (GET_MODE_MASK (mode)
8303			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8304	}
8305
8306      nonzero &= inner_nz;
8307      break;
8308
8309    case AND:
8310      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8311		  & nonzero_bits_with_known (XEXP (x, 1), mode));
8312      break;
8313
8314    case XOR:   case IOR:
8315    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8316      {
8317	unsigned HOST_WIDE_INT nonzero0 =
8318	  nonzero_bits_with_known (XEXP (x, 0), mode);
8319
8320	/* Don't call nonzero_bits a second time if it cannot change
8321	   anything.  */
8322	if ((nonzero & nonzero0) != nonzero)
8323	  nonzero &= (nonzero0
8324		      | nonzero_bits_with_known (XEXP (x, 1), mode));
8325      }
8326      break;
8327
8328    case PLUS:  case MINUS:
8329    case MULT:
8330    case DIV:   case UDIV:
8331    case MOD:   case UMOD:
8332      /* We can apply the rules of arithmetic to compute the number of
8333	 high- and low-order zero bits of these operations.  We start by
8334	 computing the width (position of the highest-order nonzero bit)
8335	 and the number of low-order zero bits for each value.  */
8336      {
8337	unsigned HOST_WIDE_INT nz0 =
8338	  nonzero_bits_with_known (XEXP (x, 0), mode);
8339	unsigned HOST_WIDE_INT nz1 =
8340	  nonzero_bits_with_known (XEXP (x, 1), mode);
8341	int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
8342	int width0 = floor_log2 (nz0) + 1;
8343	int width1 = floor_log2 (nz1) + 1;
8344	int low0 = floor_log2 (nz0 & -nz0);
8345	int low1 = floor_log2 (nz1 & -nz1);
8346	HOST_WIDE_INT op0_maybe_minusp
8347	  = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
8348	HOST_WIDE_INT op1_maybe_minusp
8349	  = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
8350	unsigned int result_width = mode_width;
8351	int result_low = 0;
8352
8353	switch (code)
8354	  {
8355	  case PLUS:
8356	    result_width = MAX (width0, width1) + 1;
8357	    result_low = MIN (low0, low1);
8358	    break;
8359	  case MINUS:
8360	    result_low = MIN (low0, low1);
8361	    break;
8362	  case MULT:
8363	    result_width = width0 + width1;
8364	    result_low = low0 + low1;
8365	    break;
8366	  case DIV:
8367	    if (width1 == 0)
8368	      break;
8369	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8370	      result_width = width0;
8371	    break;
8372	  case UDIV:
8373	    if (width1 == 0)
8374	      break;
8375	    result_width = width0;
8376	    break;
8377	  case MOD:
8378	    if (width1 == 0)
8379	      break;
8380	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8381	      result_width = MIN (width0, width1);
8382	    result_low = MIN (low0, low1);
8383	    break;
8384	  case UMOD:
8385	    if (width1 == 0)
8386	      break;
8387	    result_width = MIN (width0, width1);
8388	    result_low = MIN (low0, low1);
8389	    break;
8390	  default:
8391	    abort ();
8392	  }
8393
8394	if (result_width < mode_width)
8395	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8396
8397	if (result_low > 0)
8398	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8399
8400#ifdef POINTERS_EXTEND_UNSIGNED
8401	/* If pointers extend unsigned and this is an addition or subtraction
8402	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8403	   zero.  */
8404	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8405	    && (code == PLUS || code == MINUS)
8406	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8407	  nonzero &= GET_MODE_MASK (ptr_mode);
8408#endif
8409      }
8410      break;
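
      /* Numeric illustration of the width arithmetic above:  for PLUS with
	 NZ0 == 0x0c (WIDTH0 == 4, LOW0 == 2) and NZ1 == 0x05 (WIDTH1 == 3,
	 LOW1 == 0), RESULT_WIDTH is MAX (4, 3) + 1 == 5 and RESULT_LOW is 0,
	 so the sum's nonzero bits are confined to the mask 0x1f.  */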
8411
8412    case ZERO_EXTRACT:
8413      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8414	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8415	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8416      break;
8417
8418    case SUBREG:
8419      /* If this is a SUBREG formed for a promoted variable that has
8420	 been zero-extended, we know that at least the high-order bits
8421	 are zero, though others might be too.  */
8422
8423      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8424	nonzero = (GET_MODE_MASK (GET_MODE (x))
8425		   & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x)));
8426
8427      /* If the inner mode is a single word for both the host and target
8428	 machines, we can compute this from which bits of the inner
8429	 object might be nonzero.  */
8430      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8431	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8432	      <= HOST_BITS_PER_WIDE_INT))
8433	{
8434	  nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode);
8435
8436#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8437	  /* If this is a typical RISC machine, we only have to worry
8438	     about the way loads are extended.  */
8439	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8440	       ? (((nonzero
8441		    & (((unsigned HOST_WIDE_INT) 1
8442			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8443		   != 0))
8444	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8445	      || GET_CODE (SUBREG_REG (x)) != MEM)
8446#endif
8447	    {
8448	      /* On many CISC machines, accessing an object in a wider mode
8449		 causes the high-order bits to become undefined.  So they are
8450		 not known to be zero.  */
8451	      if (GET_MODE_SIZE (GET_MODE (x))
8452		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8453		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8454			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8455	    }
8456	}
8457      break;
8458
8459    case ASHIFTRT:
8460    case LSHIFTRT:
8461    case ASHIFT:
8462    case ROTATE:
8463      /* The nonzero bits are in two classes: any bits within MODE
8464	 that aren't in GET_MODE (x) are always significant.  The rest of the
8465	 nonzero bits are those that are significant in the operand of
8466	 the shift when shifted the appropriate number of bits.  This
8467	 shows that high-order bits are cleared by the right shift and
8468	 low-order bits by left shifts.  */
8469      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8470	  && INTVAL (XEXP (x, 1)) >= 0
8471	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8472	{
8473	  enum machine_mode inner_mode = GET_MODE (x);
8474	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8475	  int count = INTVAL (XEXP (x, 1));
8476	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8477	  unsigned HOST_WIDE_INT op_nonzero =
8478	    nonzero_bits_with_known (XEXP (x, 0), mode);
8479	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8480	  unsigned HOST_WIDE_INT outer = 0;
8481
8482	  if (mode_width > width)
8483	    outer = (op_nonzero & nonzero & ~mode_mask);
8484
8485	  if (code == LSHIFTRT)
8486	    inner >>= count;
8487	  else if (code == ASHIFTRT)
8488	    {
8489	      inner >>= count;
8490
8491	      /* If the sign bit may have been nonzero before the shift, we
8492		 need to mark all the places it could have been copied to
8493		 by the shift as possibly nonzero.  */
8494	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8495		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8496	    }
8497	  else if (code == ASHIFT)
8498	    inner <<= count;
8499	  else
8500	    inner = ((inner << (count % width)
8501		      | (inner >> (width - (count % width)))) & mode_mask);
8502
8503	  nonzero &= (outer | inner);
8504	}
8505      break;
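
      /* For example:  in (lshiftrt:SI (reg X) (const_int 28)) with nothing
	 known about X, INNER is 0xffffffff shifted right by 28, so at most
	 the low four bits of the result can be nonzero and NONZERO is
	 reduced to 0xf.  */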
8506
8507    case FFS:
8508    case POPCOUNT:
8509      /* This is at most the number of bits in the mode.  */
8510      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
8511      break;
8512
8513    case CLZ:
8514      /* If CLZ has a known value at zero, then the nonzero bits are
8515	 that value, plus the number of bits in the mode minus one.  */
8516      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8517	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8518      else
8519	nonzero = -1;
8520      break;
8521
8522    case CTZ:
8523      /* If CTZ has a known value at zero, then the nonzero bits are
8524	 that value, plus the number of bits in the mode minus one.  */
8525      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8526	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8527      else
8528	nonzero = -1;
8529      break;
8530
8531    case PARITY:
8532      nonzero = 1;
8533      break;
8534
8535    case IF_THEN_ELSE:
8536      nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode)
8537		  | nonzero_bits_with_known (XEXP (x, 2), mode));
8538      break;
8539
8540    default:
8541      break;
8542    }
8543
8544  return nonzero;
8545}
8546
8547/* See the macro definition above.  */
8548#undef cached_num_sign_bit_copies
8549
8550#define num_sign_bit_copies_with_known(X, M) \
8551  cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret)
8552
8553/* The function cached_num_sign_bit_copies is a wrapper around
8554   num_sign_bit_copies1.  It avoids exponential behavior in
8555   num_sign_bit_copies1 when X has identical subexpressions on the
8556   first or the second level.  */
8557
8558static unsigned int
8559cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
8560			    enum machine_mode known_mode,
8561			    unsigned int known_ret)
8562{
8563  if (x == known_x && mode == known_mode)
8564    return known_ret;
8565
8566  /* Try to find identical subexpressions.  If found call
8567     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
8568     the precomputed value for the subexpression as KNOWN_RET.  */
8569
8570  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8571      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8572    {
8573      rtx x0 = XEXP (x, 0);
8574      rtx x1 = XEXP (x, 1);
8575
8576      /* Check the first level.  */
8577      if (x0 == x1)
8578	return
8579	  num_sign_bit_copies1 (x, mode, x0, mode,
8580				num_sign_bit_copies_with_known (x0, mode));
8581
8582      /* Check the second level.  */
8583      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8584	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8585	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8586	return
8587	  num_sign_bit_copies1 (x, mode, x1, mode,
8588				num_sign_bit_copies_with_known (x1, mode));
8589
8590      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8591	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8592	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8593	return
8594	  num_sign_bit_copies1 (x, mode, x0, mode,
8595				num_sign_bit_copies_with_known (x0, mode));
8596    }
8597
8598  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
8599}
8600
8601/* Return the number of bits at the high-order end of X that are known to
8602   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8603   VOIDmode, X will be used in its own mode.  The returned value will always
8604   be between 1 and the number of bits in MODE.  */
8605
8606static unsigned int
8607num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
8608		      enum machine_mode known_mode,
8609		      unsigned int known_ret)
8610{
8611  enum rtx_code code = GET_CODE (x);
8612  unsigned int bitwidth;
8613  int num0, num1, result;
8614  unsigned HOST_WIDE_INT nonzero;
8615  rtx tem;
8616
8617  /* If we weren't given a mode, use the mode of X.  If the mode is still
8618     VOIDmode, we don't know anything.  Likewise if one of the modes is
8619     floating-point.  */
8620
8621  if (mode == VOIDmode)
8622    mode = GET_MODE (x);
8623
8624  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8625    return 1;
8626
8627  bitwidth = GET_MODE_BITSIZE (mode);
8628
8629  /* For a smaller object, just ignore the high bits.  */
8630  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8631    {
8632      num0 = num_sign_bit_copies_with_known (x, GET_MODE (x));
8633      return MAX (1,
8634		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8635    }
8636
8637  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8638    {
8639#ifndef WORD_REGISTER_OPERATIONS
8640      /* If this machine does not do all register operations on the entire
8641	 register and MODE is wider than the mode of X, we can say nothing
8642	 at all about the high-order bits.  */
8643      return 1;
8644#else
8645      /* Likewise on machines that do, if the mode of the object is smaller
8646	 than a word and loads of that size don't sign extend, we can say
8647	 nothing about the high order bits.  */
8648      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8649#ifdef LOAD_EXTEND_OP
8650	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8651#endif
8652	  )
8653	return 1;
8654#endif
8655    }
8656
8657  switch (code)
8658    {
8659    case REG:
8660
8661#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8662      /* If pointers extend signed and this is a pointer in Pmode, say that
8663	 all the bits above ptr_mode are known to be sign bit copies.  */
8664      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8665	  && REG_POINTER (x))
8666	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8667#endif
8668
8669      if (reg_last_set_value[REGNO (x)] != 0
8670	  && reg_last_set_mode[REGNO (x)] == mode
8671	  && (reg_last_set_label[REGNO (x)] == label_tick
8672	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8673		  && REG_N_SETS (REGNO (x)) == 1
8674		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8675					REGNO (x))))
8676	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8677	return reg_last_set_sign_bit_copies[REGNO (x)];
8678
8679      tem = get_last_value (x);
8680      if (tem != 0)
8681	return num_sign_bit_copies_with_known (tem, mode);
8682
8683      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8684	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8685	return reg_sign_bit_copies[REGNO (x)];
8686      break;
8687
8688    case MEM:
8689#ifdef LOAD_EXTEND_OP
8690      /* Some RISC machines sign-extend all loads smaller than a word.  */
8691      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8692	return MAX (1, ((int) bitwidth
8693			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8694#endif
8695      break;
8696
8697    case CONST_INT:
8698      /* If the constant is negative, take its 1's complement and remask.
8699	 Then see how many zero bits we have.  */
8700      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8701      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8702	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8703	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8704
8705      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8706
8707    case SUBREG:
8708      /* If this is a SUBREG for a promoted object that is sign-extended
8709	 and we are looking at it in a wider mode, we know that at least the
8710	 high-order bits are sign bit copies.  */
8711
8712      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8713	{
8714	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8715	  return MAX ((int) bitwidth
8716		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8717		      num0);
8718	}
8719
8720      /* For a smaller object, just ignore the high bits.  */
8721      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8722	{
8723	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode);
8724	  return MAX (1, (num0
8725			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8726				   - bitwidth)));
8727	}
8728
8729#ifdef WORD_REGISTER_OPERATIONS
8730#ifdef LOAD_EXTEND_OP
8731      /* For paradoxical SUBREGs on machines where all register operations
8732	 affect the entire register, just look inside.  Note that we are
8733	 passing MODE to the recursive call, so the number of sign bit copies
8734	 will remain relative to that mode, not the inner mode.  */
8735
8736      /* This works only if loads sign extend.  Otherwise, if we get a
8737	 reload for the inner part, it may be loaded from the stack, and
8738	 then we lose all sign bit copies that existed before the store
8739	 to the stack.  */
8740
8741      if ((GET_MODE_SIZE (GET_MODE (x))
8742	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8743	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8744	  && GET_CODE (SUBREG_REG (x)) == MEM)
8745	return num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8746#endif
8747#endif
8748      break;
8749
8750    case SIGN_EXTRACT:
8751      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8752	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8753      break;
8754
8755    case SIGN_EXTEND:
8756      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8757	      + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode));
8758
8759    case TRUNCATE:
8760      /* For a smaller object, just ignore the high bits.  */
8761      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode);
8762      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8763				    - bitwidth)));
8764
8765    case NOT:
8766      return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8767
8768    case ROTATE:       case ROTATERT:
8769      /* If we are rotating left by a number of bits less than the number
8770	 of sign bit copies, we can just subtract that amount from the
8771	 number.  */
8772      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8773	  && INTVAL (XEXP (x, 1)) >= 0
8774	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8775	{
8776	  num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8777	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8778				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8779	}
8780      break;
8781
8782    case NEG:
8783      /* In general, this subtracts one sign bit copy.  But if the value
8784	 is known to be positive, the number of sign bit copies is the
8785	 same as that of the input.  Finally, if the input has just one bit
8786	 that might be nonzero, all the bits are copies of the sign bit.  */
8787      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8788      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8789	return num0 > 1 ? num0 - 1 : 1;
8790
8791      nonzero = nonzero_bits (XEXP (x, 0), mode);
8792      if (nonzero == 1)
8793	return bitwidth;
8794
8795      if (num0 > 1
8796	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8797	num0--;
8798
8799      return num0;
8800
8801    case IOR:   case AND:   case XOR:
8802    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8803      /* Logical operations will preserve the number of sign-bit copies.
8804	 MIN and MAX operations always return one of the operands.  */
8805      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8806      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8807      return MIN (num0, num1);
8808
8809    case PLUS:  case MINUS:
8810      /* For addition and subtraction, we can have a 1-bit carry.  However,
8811	 if we are subtracting 1 from a positive number, there will not
8812	 be such a carry.  Furthermore, if the positive number is known to
8813	 be 0 or 1, we know the result is either -1 or 0.  */
8814
8815      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8816	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8817	{
8818	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8819	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8820	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8821		    : bitwidth - floor_log2 (nonzero) - 1);
8822	}
8823
8824      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8825      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8826      result = MAX (1, MIN (num0, num1) - 1);
8827
8828#ifdef POINTERS_EXTEND_UNSIGNED
8829      /* If pointers extend signed and this is an addition or subtraction
8830	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8831	 sign bit copies.  */
8832      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8833	  && (code == PLUS || code == MINUS)
8834	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8835	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8836			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8837		      result);
8838#endif
8839      return result;
8840
8841    case MULT:
8842      /* The number of bits of the product is the sum of the number of
8843	 bits of both terms.  However, unless one of the terms is known
8844	 to be positive, we must allow for an additional bit since negating
8845	 a negative number can remove one sign bit copy.  */
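      /* For example, in SImode, if each operand has 20 sign bit copies
	 (each value fits in 13 signed bits), the product starts with
	 20 + 20 - 32 = 8 copies; unless one operand is known to be
	 nonnegative, one more copy is dropped below, giving 7.  */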
8846
8847      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8848      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8849
8850      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8851      if (result > 0
8852	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8853	      || (((nonzero_bits (XEXP (x, 0), mode)
8854		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8855		  && ((nonzero_bits (XEXP (x, 1), mode)
8856		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8857	result--;
8858
8859      return MAX (1, result);
8860
8861    case UDIV:
8862      /* The result must be <= the first operand.  If the first operand
8863         has the high bit set, we know nothing about the number of sign
8864         bit copies.  */
8865      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8866	return 1;
8867      else if ((nonzero_bits (XEXP (x, 0), mode)
8868		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8869	return 1;
8870      else
8871	return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8872
8873    case UMOD:
8874      /* The result must be <= the second operand.  */
8875      return num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8876
8877    case DIV:
8878      /* Similar to unsigned division, except that we have to worry about
8879	 the case where the divisor is negative, in which case we have
8880	 to add 1.  */
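      /* For example, in QImode, if the dividend has 3 sign bit copies
	 (it lies in [-32, 31]), dividing by -1 can yield 32 (00100000),
	 which has only 2 sign bit copies; hence the adjustment below.  */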
8881      result = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8882      if (result > 1
8883	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8884	      || (nonzero_bits (XEXP (x, 1), mode)
8885		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8886	result--;
8887
8888      return result;
8889
8890    case MOD:
8891      result = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8892      if (result > 1
8893	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8894	      || (nonzero_bits (XEXP (x, 1), mode)
8895		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8896	result--;
8897
8898      return result;
8899
8900    case ASHIFTRT:
8901      /* Shifts by a constant add to the number of bits equal to the
8902	 sign bit.  */
8903      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8904      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8905	  && INTVAL (XEXP (x, 1)) > 0)
8906	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8907
8908      return num0;
8909
8910    case ASHIFT:
8911      /* Left shifts destroy copies.  */
8912      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8913	  || INTVAL (XEXP (x, 1)) < 0
8914	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8915	return 1;
8916
8917      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8918      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8919
8920    case IF_THEN_ELSE:
8921      num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8922      num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode);
8923      return MIN (num0, num1);
8924
8925    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8926    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
8927    case GEU: case GTU: case LEU: case LTU:
8928    case UNORDERED: case ORDERED:
8929      /* If the constant is negative, take its 1's complement and remask.
8930	 Then see how many zero bits we have.  */
8931      nonzero = STORE_FLAG_VALUE;
8932      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8933	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8934	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8935
8936      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8937      break;
8938
8939    default:
8940      break;
8941    }
8942
8943  /* If we haven't been able to figure it out by one of the above rules,
8944     see if some of the high-order bits are known to be zero.  If so,
8945     count those bits and return one less than that amount.  If we can't
8946     safely compute the mask for this mode, always return BITWIDTH.  */
8947
8948  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8949    return 1;
8950
8951  nonzero = nonzero_bits (x, mode);
8952  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8953	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8954}
8955
8956/* Return the number of "extended" bits there are in X, when interpreted
8957   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8958   unsigned quantities, this is the number of high-order zero bits.
8959   For signed quantities, this is the number of copies of the sign bit
8960   minus 1.  In both case, this function returns the number of "spare"
8961   minus 1.  In both cases, this function returns the number of "spare"
8962   at least 1 are added, the addition is known not to overflow.
8963
8964   This function will always return 0 unless called during combine, which
8965   implies that it must be called from a define_split.  */
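/* As an illustration of the overflow remark above: if this function
   returns at least 1 for both A and B in SImode, each value fits in
   31 bits, so A + B fits in 32 bits and cannot overflow.  */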
8966
8967unsigned int
8968extended_count (rtx x, enum machine_mode mode, int unsignedp)
8969{
8970  if (nonzero_sign_valid == 0)
8971    return 0;
8972
8973  return (unsignedp
8974	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8975	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8976			       - floor_log2 (nonzero_bits (x, mode)))
8977	     : 0)
8978	  : num_sign_bit_copies (x, mode) - 1);
8979}
8980
8981/* This function is called from `simplify_shift_const' to merge two
8982   outer operations.  Specifically, we have already found that we need
8983   to perform operation *POP0 with constant *PCONST0 at the outermost
8984   position.  We would now like to also perform OP1 with constant CONST1
8985   (with *POP0 being done last).
8986
8987   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8988   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8989   complement the innermost operand, otherwise it is unchanged.
8990
8991   MODE is the mode in which the operation will be done.  No bits outside
8992   the width of this mode matter.  It is assumed that the width of this mode
8993   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8994
8995   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8996   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8997   result is simply *PCONST0.
8998
8999   If the resulting operation cannot be expressed as one operation, we
9000   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
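/* As an illustration: merging OP1 == AND with CONST1 == 0x0ff0 into
   *POP0 == AND with *PCONST0 == 0xff00 leaves a single AND with
   constant 0x0f00, while an AND with the full mode mask, or an IOR,
   XOR, or PLUS with zero, is recognized below as a no-op and *POP0
   becomes NIL.  */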
9001
9002static int
9003merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
9004{
9005  enum rtx_code op0 = *pop0;
9006  HOST_WIDE_INT const0 = *pconst0;
9007
9008  const0 &= GET_MODE_MASK (mode);
9009  const1 &= GET_MODE_MASK (mode);
9010
9011  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9012  if (op0 == AND)
9013    const1 &= const0;
9014
9015  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
9016     if OP0 is SET.  */
9017
9018  if (op1 == NIL || op0 == SET)
9019    return 1;
9020
9021  else if (op0 == NIL)
9022    op0 = op1, const0 = const1;
9023
9024  else if (op0 == op1)
9025    {
9026      switch (op0)
9027	{
9028	case AND:
9029	  const0 &= const1;
9030	  break;
9031	case IOR:
9032	  const0 |= const1;
9033	  break;
9034	case XOR:
9035	  const0 ^= const1;
9036	  break;
9037	case PLUS:
9038	  const0 += const1;
9039	  break;
9040	case NEG:
9041	  op0 = NIL;
9042	  break;
9043	default:
9044	  break;
9045	}
9046    }
9047
9048  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9049  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9050    return 0;
9051
9052  /* If the two constants aren't the same, we can't do anything.  The
9053     remaining six cases can all be done.  */
9054  else if (const0 != const1)
9055    return 0;
9056
9057  else
9058    switch (op0)
9059      {
9060      case IOR:
9061	if (op1 == AND)
9062	  /* (a & b) | b == b */
9063	  op0 = SET;
9064	else /* op1 == XOR */
9065	  /* (a ^ b) | b == a | b */
9066	  {;}
9067	break;
9068
9069      case XOR:
9070	if (op1 == AND)
9071	  /* (a & b) ^ b == (~a) & b */
9072	  op0 = AND, *pcomp_p = 1;
9073	else /* op1 == IOR */
9074	  /* (a | b) ^ b == a & ~b */
9075	  op0 = AND, const0 = ~const0;
9076	break;
9077
9078      case AND:
9079	if (op1 == IOR)
9080	  /* (a | b) & b == b */
9081	  op0 = SET;
9082	else /* op1 == XOR */
9083	  /* (a ^ b) & b == (~a) & b */
9084	  *pcomp_p = 1;
9085	break;
9086      default:
9087	break;
9088      }
9089
9090  /* Check for NO-OP cases.  */
9091  const0 &= GET_MODE_MASK (mode);
9092  if (const0 == 0
9093      && (op0 == IOR || op0 == XOR || op0 == PLUS))
9094    op0 = NIL;
9095  else if (const0 == 0 && op0 == AND)
9096    op0 = SET;
9097  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9098	   && op0 == AND)
9099    op0 = NIL;
9100
9101  /* ??? Slightly redundant with the above mask, but not entirely.
9102     Moving this above means we'd have to sign-extend the mode mask
9103     for the final test.  */
9104  const0 = trunc_int_for_mode (const0, mode);
9105
9106  *pop0 = op0;
9107  *pconst0 = const0;
9108
9109  return 1;
9110}
9111
9112/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9113   The result of the shift is RESULT_MODE.  X, if nonzero, is an expression
9114   that we started with.
9115
9116   The shift is normally computed in the widest mode we find in VAROP, as
9117   long as it isn't a different number of words than RESULT_MODE.  Exceptions
9118   are right shifts and ROTATE, which are always done in their original mode.  */
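/* As an illustration: (lshiftrt:SI (lshiftrt:SI X 2) 3) is combined
   below into (lshiftrt:SI X 5), while mixed directions such as
   (lshiftrt:SI (ashift:SI X 3) 2) can become a single shift under an
   AND mask computed via merge_outer_ops.  */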
9119
9120static rtx
9121simplify_shift_const (rtx x, enum rtx_code code,
9122		      enum machine_mode result_mode, rtx varop,
9123		      int orig_count)
9124{
9125  enum rtx_code orig_code = code;
9126  unsigned int count;
9127  int signed_count;
9128  enum machine_mode mode = result_mode;
9129  enum machine_mode shift_mode, tmode;
9130  unsigned int mode_words
9131    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9132  /* We form (outer_op (code varop count) (outer_const)).  */
9133  enum rtx_code outer_op = NIL;
9134  HOST_WIDE_INT outer_const = 0;
9135  rtx const_rtx;
9136  int complement_p = 0;
9137  rtx new;
9138
9139  /* Make sure to truncate the "natural" shift on the way in.  We don't
9140     want to do this inside the loop as it makes it more difficult to
9141     combine shifts.  */
9142  if (SHIFT_COUNT_TRUNCATED)
9143    orig_count &= GET_MODE_BITSIZE (mode) - 1;
9144
9145  /* If we were given an invalid count, don't do anything except exactly
9146     what was requested.  */
9147
9148  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9149    {
9150      if (x)
9151	return x;
9152
9153      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9154    }
9155
9156  count = orig_count;
9157
9158  /* Unless one of the branches of the `if' in this loop does a `continue',
9159     we will `break' the loop after the `if'.  */
9160
9161  while (count != 0)
9162    {
9163      /* If we have an operand of (clobber (const_int 0)), just return that
9164	 value.  */
9165      if (GET_CODE (varop) == CLOBBER)
9166	return varop;
9167
9168      /* If we discovered we had to complement VAROP, leave.  Making a NOT
9169	 here would cause an infinite loop.  */
9170      if (complement_p)
9171	break;
9172
9173      /* Convert ROTATERT to ROTATE.  */
9174      if (code == ROTATERT)
9175	{
9176	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9177	  code = ROTATE;
9178	  if (VECTOR_MODE_P (result_mode))
9179	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9180	  else
9181	    count = bitsize - count;
9182	}
9183
9184      /* We need to determine what mode we will do the shift in.  If the
9185	 shift is a right shift or a ROTATE, we must always do it in the mode
9186	 it was originally done in.  Otherwise, we can do it in MODE, the
9187	 widest mode encountered.  */
9188      shift_mode
9189	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9190	   ? result_mode : mode);
9191
9192      /* Handle cases where the count is greater than the size of the mode
9193	 minus 1.  For ASHIFT, use the size minus one as the count (this can
9194	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9195	 take the count modulo the size.  For other shifts, the result is
9196	 zero.
9197
9198	 Since these shifts are being produced by the compiler by combining
9199	 multiple operations, each of which is defined, we know what the
9200	 result is supposed to be.  */
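      /* For example, (ashiftrt:SI X 40) is treated below as
	 (ashiftrt:SI X 31), (rotate:SI X 40) as (rotate:SI X 8), and
	 (lshiftrt:SI X 40) as zero (modulo any outer operation).  */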
9201
9202      if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9203	{
9204	  if (code == ASHIFTRT)
9205	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9206	  else if (code == ROTATE || code == ROTATERT)
9207	    count %= GET_MODE_BITSIZE (shift_mode);
9208	  else
9209	    {
9210	      /* We can't simply return zero because there may be an
9211		 outer op.  */
9212	      varop = const0_rtx;
9213	      count = 0;
9214	      break;
9215	    }
9216	}
9217
9218      /* An arithmetic right shift of a quantity known to be -1 or 0
9219	 is a no-op.  */
9220      if (code == ASHIFTRT
9221	  && (num_sign_bit_copies (varop, shift_mode)
9222	      == GET_MODE_BITSIZE (shift_mode)))
9223	{
9224	  count = 0;
9225	  break;
9226	}
9227
9228      /* If we are doing an arithmetic right shift and discarding all but
9229	 the sign bit copies, this is equivalent to doing a shift by the
9230	 bitsize minus one.  Convert it into that shift because it will often
9231	 allow other simplifications.  */
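      /* For example, if VAROP is (sign_extend:SI (reg:HI R)), it has at
	 least 17 sign bit copies, so (ashiftrt:SI VAROP 20) discards
	 nothing but sign bit copies and is rewritten below as
	 (ashiftrt:SI VAROP 31).  */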
9232
9233      if (code == ASHIFTRT
9234	  && (count + num_sign_bit_copies (varop, shift_mode)
9235	      >= GET_MODE_BITSIZE (shift_mode)))
9236	count = GET_MODE_BITSIZE (shift_mode) - 1;
9237
9238      /* We simplify the tests below and elsewhere by converting
9239	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9240	 `make_compound_operation' will convert it to an ASHIFTRT for
9241	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9242      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9243	  && code == ASHIFTRT
9244	  && ((nonzero_bits (varop, shift_mode)
9245	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9246	      == 0))
9247	code = LSHIFTRT;
9248
9249      if (code == LSHIFTRT
9250	  && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9251	  && !(nonzero_bits (varop, shift_mode) >> count))
9252	varop = const0_rtx;
9253      if (code == ASHIFT
9254	  && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9255	  && !((nonzero_bits (varop, shift_mode) << count)
9256	       & GET_MODE_MASK (shift_mode)))
9257	varop = const0_rtx;
9258
9259      switch (GET_CODE (varop))
9260	{
9261	case SIGN_EXTEND:
9262	case ZERO_EXTEND:
9263	case SIGN_EXTRACT:
9264	case ZERO_EXTRACT:
9265	  new = expand_compound_operation (varop);
9266	  if (new != varop)
9267	    {
9268	      varop = new;
9269	      continue;
9270	    }
9271	  break;
9272
9273	case MEM:
9274	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9275	     minus the width of a smaller mode, we can do this with a
9276	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9277	  if ((code == ASHIFTRT || code == LSHIFTRT)
9278	      && ! mode_dependent_address_p (XEXP (varop, 0))
9279	      && ! MEM_VOLATILE_P (varop)
9280	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9281					 MODE_INT, 1)) != BLKmode)
9282	    {
9283	      new = adjust_address_nv (varop, tmode,
9284				       BYTES_BIG_ENDIAN ? 0
9285				       : count / BITS_PER_UNIT);
9286
9287	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9288				     : ZERO_EXTEND, mode, new);
9289	      count = 0;
9290	      continue;
9291	    }
9292	  break;
9293
9294	case USE:
9295	  /* Similar to the case above, except that we can only do this if
9296	     the resulting mode is the same as that of the underlying
9297	     MEM and adjust the address depending on the *bits* endianness
9298	     because of the way that bit-field extract insns are defined.  */
9299	  if ((code == ASHIFTRT || code == LSHIFTRT)
9300	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9301					 MODE_INT, 1)) != BLKmode
9302	      && tmode == GET_MODE (XEXP (varop, 0)))
9303	    {
9304	      if (BITS_BIG_ENDIAN)
9305		new = XEXP (varop, 0);
9306	      else
9307		{
9308		  new = copy_rtx (XEXP (varop, 0));
9309		  SUBST (XEXP (new, 0),
9310			 plus_constant (XEXP (new, 0),
9311					count / BITS_PER_UNIT));
9312		}
9313
9314	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9315				     : ZERO_EXTEND, mode, new);
9316	      count = 0;
9317	      continue;
9318	    }
9319	  break;
9320
9321	case SUBREG:
9322	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9323	     the same number of words as what we've seen so far.  Then store
9324	     the widest mode in MODE.  */
9325	  if (subreg_lowpart_p (varop)
9326	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9327		  > GET_MODE_SIZE (GET_MODE (varop)))
9328	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9329				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9330		 == mode_words)
9331	    {
9332	      varop = SUBREG_REG (varop);
9333	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9334		mode = GET_MODE (varop);
9335	      continue;
9336	    }
9337	  break;
9338
9339	case MULT:
9340	  /* Some machines use MULT instead of ASHIFT because MULT
9341	     is cheaper.  But it is still better on those machines to
9342	     merge two shifts into one.  */
9343	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9344	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9345	    {
9346	      varop
9347		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9348			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9349	      continue;
9350	    }
9351	  break;
9352
9353	case UDIV:
9354	  /* Similar, for when divides are cheaper.  */
9355	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9356	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9357	    {
9358	      varop
9359		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9360			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9361	      continue;
9362	    }
9363	  break;
9364
9365	case ASHIFTRT:
9366	  /* If we are extracting just the sign bit of an arithmetic
9367	     right shift, that shift is not needed.  However, the sign
9368	     bit of a wider mode may be different from what would be
9369	     interpreted as the sign bit in a narrower mode, so, if
9370	     the result is narrower, don't discard the shift.  */
9371	  if (code == LSHIFTRT
9372	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9373	      && (GET_MODE_BITSIZE (result_mode)
9374		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9375	    {
9376	      varop = XEXP (varop, 0);
9377	      continue;
9378	    }
9379
9380	  /* ... fall through ...  */
9381
9382	case LSHIFTRT:
9383	case ASHIFT:
9384	case ROTATE:
9385	  /* Here we have two nested shifts.  The result is usually the
9386	     AND of a new shift with a mask.  We compute the result below.  */
9387	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9388	      && INTVAL (XEXP (varop, 1)) >= 0
9389	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9390	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9391	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9392	    {
9393	      enum rtx_code first_code = GET_CODE (varop);
9394	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9395	      unsigned HOST_WIDE_INT mask;
9396	      rtx mask_rtx;
9397
9398	      /* We have one common special case.  We can't do any merging if
9399		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9400		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9401		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9402		 we can convert it to
9403		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9404		 This simplifies certain SIGN_EXTEND operations.  */
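	      /* An illustrative instance: M1 == SImode, M2 == HImode and
		 C2 == 16; the masked value is shifted left by 16 in
		 SImode and then arithmetically right by C1, also in
		 SImode.  */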
9405	      if (code == ASHIFT && first_code == ASHIFTRT
9406		  && count == (unsigned int)
9407			      (GET_MODE_BITSIZE (result_mode)
9408			       - GET_MODE_BITSIZE (GET_MODE (varop))))
9409		{
9410		  /* C3 has the low-order C1 bits zero.  */
9411
9412		  mask = (GET_MODE_MASK (mode)
9413			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9414
9415		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9416						  XEXP (varop, 0), mask);
9417		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9418						varop, count);
9419		  count = first_count;
9420		  code = ASHIFTRT;
9421		  continue;
9422		}
9423
9424	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9425		 than C1 high-order bits equal to the sign bit, we can convert
9426		 this to either an ASHIFT or an ASHIFTRT depending on the
9427		 two counts.
9428
9429		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9430
9431	      if (code == ASHIFTRT && first_code == ASHIFT
9432		  && GET_MODE (varop) == shift_mode
9433		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9434		      > first_count))
9435		{
9436		  varop = XEXP (varop, 0);
9437
9438		  signed_count = count - first_count;
9439		  if (signed_count < 0)
9440		    count = -signed_count, code = ASHIFT;
9441		  else
9442		    count = signed_count;
9443
9444		  continue;
9445		}
9446
9447	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9448		 we can only do this if FIRST_CODE is also ASHIFTRT.
9449
9450		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9451		 ASHIFTRT.
9452
9453		 If the mode of this shift is not the mode of the outer shift,
9454		 we can't do this if either shift is a right shift or ROTATE.
9455
9456		 Finally, we can't do any of these if the mode is too wide
9457		 unless the codes are the same.
9458
9459		 Handle the case where the shift codes are the same
9460		 first.  */
9461
9462	      if (code == first_code)
9463		{
9464		  if (GET_MODE (varop) != result_mode
9465		      && (code == ASHIFTRT || code == LSHIFTRT
9466			  || code == ROTATE))
9467		    break;
9468
9469		  count += first_count;
9470		  varop = XEXP (varop, 0);
9471		  continue;
9472		}
9473
9474	      if (code == ASHIFTRT
9475		  || (code == ROTATE && first_code == ASHIFTRT)
9476		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9477		  || (GET_MODE (varop) != result_mode
9478		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9479			  || first_code == ROTATE
9480			  || code == ROTATE)))
9481		break;
9482
9483	      /* To compute the mask to apply after the shift, shift the
9484		 nonzero bits of the inner shift the same way the
9485		 outer shift will.  */
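	      /* For example, for (lshiftrt:SI (ashift:SI X 3) 2), the
		 inner nonzero bits are 0xfffffff8; shifting them right
		 by 2 gives the mask 0x3ffffffe to apply after the
		 merged shift.  */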
9486
9487	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9488
9489	      mask_rtx
9490		= simplify_binary_operation (code, result_mode, mask_rtx,
9491					     GEN_INT (count));
9492
9493	      /* Give up if we can't compute an outer operation to use.  */
9494	      if (mask_rtx == 0
9495		  || GET_CODE (mask_rtx) != CONST_INT
9496		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9497					INTVAL (mask_rtx),
9498					result_mode, &complement_p))
9499		break;
9500
9501	      /* If the shifts are in the same direction, we add the
9502		 counts.  Otherwise, we subtract them.  */
9503	      signed_count = count;
9504	      if ((code == ASHIFTRT || code == LSHIFTRT)
9505		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9506		signed_count += first_count;
9507	      else
9508		signed_count -= first_count;
9509
9510	      /* If COUNT is positive, the new shift is usually CODE,
9511		 except in the two cases below, where it is
9512		 FIRST_CODE.  If the count is negative, FIRST_CODE should
9513		 always be used.  */
9514	      if (signed_count > 0
9515		  && ((first_code == ROTATE && code == ASHIFT)
9516		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9517		code = first_code, count = signed_count;
9518	      else if (signed_count < 0)
9519		code = first_code, count = -signed_count;
9520	      else
9521		count = signed_count;
9522
9523	      varop = XEXP (varop, 0);
9524	      continue;
9525	    }
9526
9527	  /* If we have (A << B << C) for any shift, we can convert this to
9528	     (A << C << B).  This wins if A is a constant.  Only try this if
9529	     B is not a constant.  */
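	  /* For example, (ashift:SI (ashift:SI (const_int 1) B) 4)
	     becomes (ashift:SI (const_int 16) B), folding the constant
	     shift immediately.  */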
9530
9531	  else if (GET_CODE (varop) == code
9532		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9533		   && 0 != (new
9534			    = simplify_binary_operation (code, mode,
9535							 XEXP (varop, 0),
9536							 GEN_INT (count))))
9537	    {
9538	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9539	      count = 0;
9540	      continue;
9541	    }
9542	  break;
9543
9544	case NOT:
9545	  /* Make this fit the case below.  */
9546	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9547			       GEN_INT (GET_MODE_MASK (mode)));
9548	  continue;
9549
9550	case IOR:
9551	case AND:
9552	case XOR:
9553	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9554	     with C the size of VAROP - 1 and the shift is logical if
9555	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9556	     we have an (le X 0) operation.   If we have an arithmetic shift
9557	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9558	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9559
9560	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9561	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9562	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9563	      && (code == LSHIFTRT || code == ASHIFTRT)
9564	      && count == (unsigned int)
9565			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9566	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9567	    {
9568	      count = 0;
9569	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9570				  const0_rtx);
9571
9572	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9573		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9574
9575	      continue;
9576	    }
9577
9578	  /* If we have (shift (logical)), move the logical to the outside
9579	     to allow it to possibly combine with another logical and the
9580	     shift to combine with another shift.  This also canonicalizes to
9581	     what a ZERO_EXTRACT looks like.  Also, some machines have
9582	     (and (shift)) insns.  */
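	  /* For example, (lshiftrt:SI (and:SI X 0xf0) 4) becomes
	     (and:SI (lshiftrt:SI X 4) 0x0f), with the AND recorded as
	     the outer operation.  */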
9583
9584	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9585	      && (new = simplify_binary_operation (code, result_mode,
9586						   XEXP (varop, 1),
9587						   GEN_INT (count))) != 0
9588	      && GET_CODE (new) == CONST_INT
9589	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9590				  INTVAL (new), result_mode, &complement_p))
9591	    {
9592	      varop = XEXP (varop, 0);
9593	      continue;
9594	    }
9595
9596	  /* If we can't do that, try to simplify the shift in each arm of the
9597	     logical expression, make a new logical expression, and apply
9598	     the inverse distributive law.  */
9599	  {
9600	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9601					    XEXP (varop, 0), count);
9602	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9603					    XEXP (varop, 1), count);
9604
9605	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9606	    varop = apply_distributive_law (varop);
9607
9608	    count = 0;
9609	  }
9610	  break;
9611
9612	case EQ:
9613	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9614	     says that the sign bit can be tested, FOO has mode MODE, C is
9615	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9616	     that may be nonzero.  */
9617	  if (code == LSHIFTRT
9618	      && XEXP (varop, 1) == const0_rtx
9619	      && GET_MODE (XEXP (varop, 0)) == result_mode
9620	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9621	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9622	      && ((STORE_FLAG_VALUE
9623		   & ((HOST_WIDE_INT) 1
9624		      << (GET_MODE_BITSIZE (result_mode) - 1))))
9625	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9626	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9627				  (HOST_WIDE_INT) 1, result_mode,
9628				  &complement_p))
9629	    {
9630	      varop = XEXP (varop, 0);
9631	      count = 0;
9632	      continue;
9633	    }
9634	  break;
9635
9636	case NEG:
9637	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9638	     than the number of bits in the mode is equivalent to A.  */
9639	  if (code == LSHIFTRT
9640	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9641	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9642	    {
9643	      varop = XEXP (varop, 0);
9644	      count = 0;
9645	      continue;
9646	    }
9647
9648	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9649	     NEG outside to allow shifts to combine.  */
9650	  if (code == ASHIFT
9651	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9652				  (HOST_WIDE_INT) 0, result_mode,
9653				  &complement_p))
9654	    {
9655	      varop = XEXP (varop, 0);
9656	      continue;
9657	    }
9658	  break;
9659
9660	case PLUS:
9661	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9662	     is one less than the number of bits in the mode is
9663	     equivalent to (xor A 1).  */
9664	  if (code == LSHIFTRT
9665	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9666	      && XEXP (varop, 1) == constm1_rtx
9667	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9668	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9669				  (HOST_WIDE_INT) 1, result_mode,
9670				  &complement_p))
9671	    {
9672	      count = 0;
9673	      varop = XEXP (varop, 0);
9674	      continue;
9675	    }
9676
9677	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9678	     that might be nonzero in BAR are those being shifted out and those
9679	     bits are known zero in FOO, we can replace the PLUS with FOO.
9680	     Similarly in the other operand order.  This code occurs when
9681	     we are computing the size of a variable-size array.  */
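	  /* For example, in (lshiftrt:SI (plus:SI FOO BAR) 2), if BAR's
	     nonzero bits lie within the low two bits and FOO is known
	     zero in those positions, the addition cannot carry into the
	     retained bits, so the PLUS is replaced by FOO.  */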
9682
9683	  if ((code == ASHIFTRT || code == LSHIFTRT)
9684	      && count < HOST_BITS_PER_WIDE_INT
9685	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9686	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9687		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9688	    {
9689	      varop = XEXP (varop, 0);
9690	      continue;
9691	    }
9692	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9693		   && count < HOST_BITS_PER_WIDE_INT
9694		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9695		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9696			    >> count)
9697		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9698			    & nonzero_bits (XEXP (varop, 1),
9699						 result_mode)))
9700	    {
9701	      varop = XEXP (varop, 1);
9702	      continue;
9703	    }
9704
9705	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9706	  if (code == ASHIFT
9707	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9708	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9709						   XEXP (varop, 1),
9710						   GEN_INT (count))) != 0
9711	      && GET_CODE (new) == CONST_INT
9712	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9713				  INTVAL (new), result_mode, &complement_p))
9714	    {
9715	      varop = XEXP (varop, 0);
9716	      continue;
9717	    }
9718	  break;
9719
9720	case MINUS:
9721	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9722	     with C the size of VAROP - 1 and the shift is logical if
9723	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9724	     we have a (gt X 0) operation.  If the shift is arithmetic with
9725	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9726	     we have a (neg (gt X 0)) operation.  */
9727
9728	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9729	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9730	      && count == (unsigned int)
9731			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9732	      && (code == LSHIFTRT || code == ASHIFTRT)
9733	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9734	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9735		 == count
9736	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9737	    {
9738	      count = 0;
9739	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9740				  const0_rtx);
9741
9742	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9743		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9744
9745	      continue;
9746	    }
9747	  break;
9748
9749	case TRUNCATE:
9750	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9751	     if the truncate does not affect the value.  */
9752	  if (code == LSHIFTRT
9753	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9754	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9755	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9756		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9757		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9758	    {
9759	      rtx varop_inner = XEXP (varop, 0);
9760
9761	      varop_inner
9762		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9763				    XEXP (varop_inner, 0),
9764				    GEN_INT
9765				    (count + INTVAL (XEXP (varop_inner, 1))));
9766	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9767	      count = 0;
9768	      continue;
9769	    }
9770	  break;
9771
9772	default:
9773	  break;
9774	}
9775
9776      break;
9777    }
9778
9779  /* We need to determine what mode to do the shift in.  If the shift is
9780     a right shift or ROTATE, we must always do it in the mode it was
9781     originally done in.  Otherwise, we can do it in MODE, the widest mode
9782     encountered.  The code we care about is that of the shift that will
9783     actually be done, not the shift that was originally requested.  */
9784  shift_mode
9785    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9786       ? result_mode : mode);
9787
9788  /* We have now finished analyzing the shift.  The result should be
9789     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9790     OUTER_OP is non-NIL, it is an operation that needs to be applied
9791     to the result of the shift.  OUTER_CONST is the relevant constant,
9792     but we must turn off all bits turned off in the shift.
9793
9794     If we were passed a value for X, see if we can use any pieces of
9795     it.  If not, make new rtx.  */
9796
9797  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9798      && GET_CODE (XEXP (x, 1)) == CONST_INT
9799      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9800    const_rtx = XEXP (x, 1);
9801  else
9802    const_rtx = GEN_INT (count);
9803
9804  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9805      && GET_MODE (XEXP (x, 0)) == shift_mode
9806      && SUBREG_REG (XEXP (x, 0)) == varop)
9807    varop = XEXP (x, 0);
9808  else if (GET_MODE (varop) != shift_mode)
9809    varop = gen_lowpart_for_combine (shift_mode, varop);
9810
9811  /* If we can't make the SUBREG, try to return what we were given.  */
9812  if (GET_CODE (varop) == CLOBBER)
9813    return x ? x : varop;
9814
9815  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9816  if (new != 0)
9817    x = new;
9818  else
9819    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9820
9821  /* If we have an outer operation and we just made a shift, it is
9822     possible that we could have simplified the shift were it not
9823     for the outer operation.  So try to do the simplification
9824     recursively.  */
9825
9826  if (outer_op != NIL && GET_CODE (x) == code
9827      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9828    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9829			      INTVAL (XEXP (x, 1)));
9830
9831  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9832     turn off all the bits that the shift would have turned off.  */
9833  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9834    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9835				GET_MODE_MASK (result_mode) >> orig_count);
9836
9837  /* Do the remainder of the processing in RESULT_MODE.  */
9838  x = gen_lowpart_for_combine (result_mode, x);
9839
9840  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9841     operation.  */
9842  if (complement_p)
9843    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9844
9845  if (outer_op != NIL)
9846    {
9847      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9848	outer_const = trunc_int_for_mode (outer_const, result_mode);
9849
9850      if (outer_op == AND)
9851	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9852      else if (outer_op == SET)
9853	/* This means that we have determined that the result is
9854	   equivalent to a constant.  This should be rare.  */
9855	x = GEN_INT (outer_const);
9856      else if (GET_RTX_CLASS (outer_op) == '1')
9857	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9858      else
9859	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9860    }
9861
9862  return x;
9863}
9864
9865/* Like recog, but we receive the address of a pointer to a new pattern.
9866   We try to match the rtx that the pointer points to.
9867   If that fails, we may try to modify or replace the pattern,
9868   storing the replacement into the same pointer object.
9869
9870   Modifications include deletion or addition of CLOBBERs.
9871
9872   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9873   the CLOBBERs are placed.
9874
9875   The value is the final insn code from the pattern ultimately matched,
9876   or -1.  */
9877
9878static int
9879recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
9880{
9881  rtx pat = *pnewpat;
9882  int insn_code_number;
9883  int num_clobbers_to_add = 0;
9884  int i;
9885  rtx notes = 0;
9886  rtx old_notes, old_pat;
9887
9888  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9889     we use to indicate that something didn't match.  If we find such a
9890     thing, force rejection.  */
9891  if (GET_CODE (pat) == PARALLEL)
9892    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9893      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9894	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9895	return -1;
9896
9897  old_pat = PATTERN (insn);
9898  old_notes = REG_NOTES (insn);
9899  PATTERN (insn) = pat;
9900  REG_NOTES (insn) = 0;
9901
9902  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9903
9904  /* If it isn't recognized, there is the possibility that we previously
9905     had an insn that clobbered some register as a side effect, but the
9906     combined insn doesn't need to do that.  So try once more without the
9907     clobbers unless this represents an ASM insn.  */
9908
9909  if (insn_code_number < 0 && ! check_asm_operands (pat)
9910      && GET_CODE (pat) == PARALLEL)
9911    {
9912      int pos;
9913
9914      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9915	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9916	  {
9917	    if (i != pos)
9918	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9919	    pos++;
9920	  }
9921
9922      SUBST_INT (XVECLEN (pat, 0), pos);
9923
9924      if (pos == 1)
9925	pat = XVECEXP (pat, 0, 0);
9926
9927      PATTERN (insn) = pat;
9928      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9929    }
9930  PATTERN (insn) = old_pat;
9931  REG_NOTES (insn) = old_notes;
9932
9933  /* Recognize all noop sets; these will be killed by a followup pass.  */
9934  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9935    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9936
9937  /* If we had any clobbers to add, make a new pattern that contains
9938     them.  Then check to make sure that all of them are dead.  */
9939  if (num_clobbers_to_add)
9940    {
9941      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9942				     rtvec_alloc (GET_CODE (pat) == PARALLEL
9943						  ? (XVECLEN (pat, 0)
9944						     + num_clobbers_to_add)
9945						  : num_clobbers_to_add + 1));
9946
9947      if (GET_CODE (pat) == PARALLEL)
9948	for (i = 0; i < XVECLEN (pat, 0); i++)
9949	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9950      else
9951	XVECEXP (newpat, 0, 0) = pat;
9952
9953      add_clobbers (newpat, insn_code_number);
9954
9955      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9956	   i < XVECLEN (newpat, 0); i++)
9957	{
9958	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9959	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9960	    return -1;
9961	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9962				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9963	}
9964      pat = newpat;
9965    }
9966
9967  *pnewpat = pat;
9968  *pnotes = notes;
9969
9970  return insn_code_number;
9971}
9972
9973/* Like gen_lowpart but for use by combine.  In combine it is not possible
9974   to create any new pseudoregs.  However, it is safe to create
9975   invalid memory addresses, because combine will try to recognize
9976   them and all they will do is make the combine attempt fail.
9977
9978   If for some reason this cannot do its job, an rtx
9979   (clobber (const_int 0)) is returned.
9980   An insn containing that will not be recognized.  */
9981
9982#undef gen_lowpart
9983
9984static rtx
9985gen_lowpart_for_combine (enum machine_mode mode, rtx x)
9986{
9987  rtx result;
9988
9989  if (GET_MODE (x) == mode)
9990    return x;
9991
9992  /* Return identity if this is a CONST or symbolic
9993     reference.  */
9994  if (mode == Pmode
9995      && (GET_CODE (x) == CONST
9996	  || GET_CODE (x) == SYMBOL_REF
9997	  || GET_CODE (x) == LABEL_REF))
9998    return x;
9999
10000  /* We can only support MODE being wider than a word if X is a
10001     constant integer or has a mode the same size.  */
10002
10003  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
10004      && ! ((GET_MODE (x) == VOIDmode
10005	     && (GET_CODE (x) == CONST_INT
10006		 || GET_CODE (x) == CONST_DOUBLE))
10007	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
10008    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10009
10010  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10011     won't know what to do.  So we will strip off the SUBREG here and
10012     process normally.  */
10013  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
10014    {
10015      x = SUBREG_REG (x);
10016      if (GET_MODE (x) == mode)
10017	return x;
10018    }
10019
10020  result = gen_lowpart_common (mode, x);
10021#ifdef CANNOT_CHANGE_MODE_CLASS
10022  if (result != 0
10023      && GET_CODE (result) == SUBREG
10024      && GET_CODE (SUBREG_REG (result)) == REG
10025      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER)
10026    bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (result))
10027				      * MAX_MACHINE_MODE
10028				      + GET_MODE (result));
10029#endif
10030
10031  if (result)
10032    return result;
10033
10034  if (GET_CODE (x) == MEM)
10035    {
10036      int offset = 0;
10037
10038      /* Refuse to work on a volatile memory ref or one with a mode-dependent
10039	 address.  */
10040      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10041	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10042
10043      /* If we want to refer to something bigger than the original memref,
10044	 generate a perverse subreg instead.  That will force a reload
10045	 of the original memref X.  */
10046      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
10047	return gen_rtx_SUBREG (mode, x, 0);
10048
10049      if (WORDS_BIG_ENDIAN)
10050	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
10051		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
10052
10053      if (BYTES_BIG_ENDIAN)
10054	{
10055	  /* Adjust the address so that the address-after-the-data is
10056	     unchanged.  */
10057	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
10058		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
10059	}
10060
10061      return adjust_address_nv (x, mode, offset);
10062    }
10063
10064  /* If X is a comparison operator, rewrite it in a new mode.  This
10065     probably won't match, but may allow further simplifications.  */
10066  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
10067    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
10068
10069  /* If we couldn't simplify X any other way, just enclose it in a
10070     SUBREG.  Normally, this SUBREG won't match, but some patterns may
10071     include an explicit SUBREG or we may simplify it further in combine.  */
10072  else
10073    {
10074      int offset = 0;
10075      rtx res;
10076      enum machine_mode sub_mode = GET_MODE (x);
10077
10078      offset = subreg_lowpart_offset (mode, sub_mode);
10079      if (sub_mode == VOIDmode)
10080	{
10081	  sub_mode = int_mode_for_mode (mode);
10082	  x = gen_lowpart_common (sub_mode, x);
10083	  if (x == 0)
10084	    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
10085	}
10086      res = simplify_gen_subreg (mode, x, sub_mode, offset);
10087      if (res)
10088	return res;
10089      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10090    }
10091}
10092
10093/* These routines make binary and unary operations by first seeing if they
10094   fold; if not, a new expression is allocated.  */
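/* As an illustration: gen_binary (AND, SImode, X, const0_rtx) folds to
   (const_int 0) via simplify_binary_operation, while an AND whose mask
   covers every possibly nonzero bit of X returns X unchanged.  */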
10095
10096static rtx
10097gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0, rtx op1)
10098{
10099  rtx result;
10100  rtx tem;
10101
10102  if (GET_CODE (op0) == CLOBBER)
10103    return op0;
10104  else if (GET_CODE (op1) == CLOBBER)
10105    return op1;
10106
10107  if (GET_RTX_CLASS (code) == 'c'
10108      && swap_commutative_operands_p (op0, op1))
10109    tem = op0, op0 = op1, op1 = tem;
10110
10111  if (GET_RTX_CLASS (code) == '<')
10112    {
10113      enum machine_mode op_mode = GET_MODE (op0);
10114
10115      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
10116	 just (REL_OP X Y).  */
10117      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
10118	{
10119	  op1 = XEXP (op0, 1);
10120	  op0 = XEXP (op0, 0);
10121	  op_mode = GET_MODE (op0);
10122	}
10123
10124      if (op_mode == VOIDmode)
10125	op_mode = GET_MODE (op1);
10126      result = simplify_relational_operation (code, op_mode, op0, op1);
10127    }
10128  else
10129    result = simplify_binary_operation (code, mode, op0, op1);
10130
10131  if (result)
10132    return result;
10133
10134  /* Put complex operands first and constants second.  */
10135  if (GET_RTX_CLASS (code) == 'c'
10136      && swap_commutative_operands_p (op0, op1))
10137    return gen_rtx_fmt_ee (code, mode, op1, op0);
10138
10139  /* If we are turning off bits already known off in OP0, we need not do
10140     an AND.  */
10141  else if (code == AND && GET_CODE (op1) == CONST_INT
10142	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10143	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
10144    return op0;
10145
10146  return gen_rtx_fmt_ee (code, mode, op0, op1);
10147}
10148
10149/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10150   comparison code that will be tested.
10151
10152   The result is a possibly different comparison code to use.  *POP0 and
10153   *POP1 may be updated.
10154
10155   It is possible that we might detect that a comparison is either always
10156   true or always false.  However, we do not perform general constant
10157   folding in combine, so this knowledge isn't useful.  Such tautologies
10158   should have been detected earlier.  Hence we ignore all such cases.  */
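/* As an illustration: called with code == GE and *POP1 == (const_int 1),
   this canonicalizes to GT with *POP1 == const0_rtx, since (ge X 1)
   and (gt X 0) are equivalent.  */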
10159
10160static enum rtx_code
10161simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10162{
10163  rtx op0 = *pop0;
10164  rtx op1 = *pop1;
10165  rtx tem, tem1;
10166  int i;
10167  enum machine_mode mode, tmode;
10168
10169  /* Try a few ways of applying the same transformation to both operands.  */
10170  while (1)
10171    {
10172#ifndef WORD_REGISTER_OPERATIONS
10173      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10174	 so check specially.  */
10175      if (code != GTU && code != GEU && code != LTU && code != LEU
10176	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10177	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
10178	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
10179	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10180	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10181	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10182	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10183	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10184	  && XEXP (op0, 1) == XEXP (op1, 1)
10185	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10186	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10187	  && (INTVAL (XEXP (op0, 1))
10188	      == (GET_MODE_BITSIZE (GET_MODE (op0))
10189		  - (GET_MODE_BITSIZE
10190		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10191	{
10192	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10193	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10194	}
10195#endif
10196
10197      /* If both operands are the same constant shift, see if we can ignore the
10198	 shift.  We can if the shift is a rotate or if the bits shifted out of
10199	 this shift are known to be zero for both inputs and if the type of
10200	 comparison is compatible with the shift.  */
10201      if (GET_CODE (op0) == GET_CODE (op1)
10202	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10203	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10204	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10205		  && (code != GT && code != LT && code != GE && code != LE))
10206	      || (GET_CODE (op0) == ASHIFTRT
10207		  && (code != GTU && code != LTU
10208		      && code != GEU && code != LEU)))
10209	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10210	  && INTVAL (XEXP (op0, 1)) >= 0
10211	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10212	  && XEXP (op0, 1) == XEXP (op1, 1))
10213	{
10214	  enum machine_mode mode = GET_MODE (op0);
10215	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10216	  int shift_count = INTVAL (XEXP (op0, 1));
10217
10218	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10219	    mask &= (mask >> shift_count) << shift_count;
10220	  else if (GET_CODE (op0) == ASHIFT)
10221	    mask = (mask & (mask << shift_count)) >> shift_count;
10222
10223	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10224	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10225	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10226	  else
10227	    break;
10228	}
10229
10230      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10231	 SUBREGs are of the same mode, and, in both cases, the AND would
10232	 be redundant if the comparison was done in the narrower mode,
10233	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10234	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10235	 if we only care about QImode, we don't need the AND).  This case
10236	 occurs if the output mode of an scc insn is not SImode and
10237	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10238
10239	 Similarly, check for a case where the AND's are ZERO_EXTEND
10240	 operations from some narrower mode even though a SUBREG is not
10241	 present.  */
10242
10243      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10244	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10245	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10246	{
10247	  rtx inner_op0 = XEXP (op0, 0);
10248	  rtx inner_op1 = XEXP (op1, 0);
10249	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10250	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10251	  int changed = 0;
10252
10253	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10254	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10255		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10256	      && (GET_MODE (SUBREG_REG (inner_op0))
10257		  == GET_MODE (SUBREG_REG (inner_op1)))
10258	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10259		  <= HOST_BITS_PER_WIDE_INT)
10260	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10261					     GET_MODE (SUBREG_REG (inner_op0)))))
10262	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10263					     GET_MODE (SUBREG_REG (inner_op1))))))
10264	    {
10265	      op0 = SUBREG_REG (inner_op0);
10266	      op1 = SUBREG_REG (inner_op1);
10267
10268	      /* The resulting comparison is always unsigned since we masked
10269		 off the original sign bit.  */
10270	      code = unsigned_condition (code);
10271
10272	      changed = 1;
10273	    }
10274
10275	  else if (c0 == c1)
10276	    for (tmode = GET_CLASS_NARROWEST_MODE
10277		 (GET_MODE_CLASS (GET_MODE (op0)));
10278		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10279	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10280		{
10281		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10282		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10283		  code = unsigned_condition (code);
10284		  changed = 1;
10285		  break;
10286		}
10287
10288	  if (! changed)
10289	    break;
10290	}
10291
10292      /* If both operands are NOT, we can strip off the outer operation
10293	 and adjust the comparison code for swapped operands; similarly for
10294	 NEG, except that this must be an equality comparison.  */
10295      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10296	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10297		   && (code == EQ || code == NE)))
10298	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10299
10300      else
10301	break;
10302    }
10303
10304  /* If the first operand is a constant, swap the operands and adjust the
10305     comparison code appropriately, but don't do this if the second operand
10306     is already a constant integer.  */
10307  if (swap_commutative_operands_p (op0, op1))
10308    {
10309      tem = op0, op0 = op1, op1 = tem;
10310      code = swap_condition (code);
10311    }
10312
10313  /* We now enter a loop during which we will try to simplify the comparison.
10314     For the most part, we only are concerned with comparisons with zero,
10315     but some things may really be comparisons with zero but not start
10316     out looking that way.  */
10317
10318  while (GET_CODE (op1) == CONST_INT)
10319    {
10320      enum machine_mode mode = GET_MODE (op0);
10321      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10322      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10323      int equality_comparison_p;
10324      int sign_bit_comparison_p;
10325      int unsigned_comparison_p;
10326      HOST_WIDE_INT const_op;
10327
10328      /* We only want to handle integral modes.  This catches VOIDmode,
10329	 CCmode, and the floating-point modes.  An exception is that we
10330	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10331	 operation.  */
10332
10333      if (GET_MODE_CLASS (mode) != MODE_INT
10334	  && ! (mode == VOIDmode
10335		&& (GET_CODE (op0) == COMPARE
10336		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10337	break;
10338
10339      /* Get the constant we are comparing against and turn off all bits
10340	 not on in our mode.  */
10341      const_op = INTVAL (op1);
10342      if (mode != VOIDmode)
10343	const_op = trunc_int_for_mode (const_op, mode);
10344      op1 = GEN_INT (const_op);
10345
10346      /* If we are comparing against a constant power of two and the value
10347	 being compared can only have that single bit nonzero (e.g., it was
10348	 `and'ed with that bit), we can replace this with a comparison
10349	 with zero.  */
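      /* For example, if OP0 is (and X (const_int 4)), so that only bit 2
	 can be nonzero, (eq (and X 4) (const_int 4)) becomes
	 (ne (and X 4) (const_int 0)); the masked value is either 0 or 4.  */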
10350      if (const_op
10351	  && (code == EQ || code == NE || code == GE || code == GEU
10352	      || code == LT || code == LTU)
10353	  && mode_width <= HOST_BITS_PER_WIDE_INT
10354	  && exact_log2 (const_op) >= 0
10355	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10356	{
10357	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10358	  op1 = const0_rtx, const_op = 0;
10359	}
10360
10361      /* Similarly, if we are comparing a value known to be either -1 or
10362	 0 with -1, change it to the opposite comparison against zero.  */
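      /* E.g., if num_sign_bit_copies shows X is either 0 or -1,
	 (eq X (const_int -1)) becomes (ne X (const_int 0)).  */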
10363
10364      if (const_op == -1
10365	  && (code == EQ || code == NE || code == GT || code == LE
10366	      || code == GEU || code == LTU)
10367	  && num_sign_bit_copies (op0, mode) == mode_width)
10368	{
10369	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10370	  op1 = const0_rtx, const_op = 0;
10371	}
10372
10373      /* Do some canonicalizations based on the comparison code.  We prefer
10374	 comparisons against zero and then prefer equality comparisons.
10375	 If we can reduce the size of a constant, we will do that too.  */
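      /* For instance, (lt X (const_int 5)) is first rewritten as
	 (le X (const_int 4)) and may then be simplified further by the
	 LE case below.  */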
10376
10377      switch (code)
10378	{
10379	case LT:
10380	  /* < C is equivalent to <= (C - 1) */
10381	  if (const_op > 0)
10382	    {
10383	      const_op -= 1;
10384	      op1 = GEN_INT (const_op);
10385	      code = LE;
10386	      /* ... fall through to LE case below.  */
10387	    }
10388	  else
10389	    break;
10390
10391	case LE:
10392	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
10393	  if (const_op < 0)
10394	    {
10395	      const_op += 1;
10396	      op1 = GEN_INT (const_op);
10397	      code = LT;
10398	    }
10399
10400	  /* If we are doing a <= 0 comparison on a value known to have
10401	     a zero sign bit, we can replace this with == 0.  */
10402	  else if (const_op == 0
10403		   && mode_width <= HOST_BITS_PER_WIDE_INT
10404		   && (nonzero_bits (op0, mode)
10405		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10406	    code = EQ;
10407	  break;
10408
10409	case GE:
10410	  /* >= C is equivalent to > (C - 1).  */
10411	  if (const_op > 0)
10412	    {
10413	      const_op -= 1;
10414	      op1 = GEN_INT (const_op);
10415	      code = GT;
10416	      /* ... fall through to GT below.  */
10417	    }
10418	  else
10419	    break;
10420
10421	case GT:
10422	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10423	  if (const_op < 0)
10424	    {
10425	      const_op += 1;
10426	      op1 = GEN_INT (const_op);
10427	      code = GE;
10428	    }
10429
10430	  /* If we are doing a > 0 comparison on a value known to have
10431	     a zero sign bit, we can replace this with != 0.  */
10432	  else if (const_op == 0
10433		   && mode_width <= HOST_BITS_PER_WIDE_INT
10434		   && (nonzero_bits (op0, mode)
10435		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10436	    code = NE;
10437	  break;
10438
10439	case LTU:
10440	  /* < C is equivalent to <= (C - 1).  */
10441	  if (const_op > 0)
10442	    {
10443	      const_op -= 1;
10444	      op1 = GEN_INT (const_op);
10445	      code = LEU;
10446	      /* ... fall through ...  */
10447	    }
10448
10449	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10450	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10451		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10452	    {
10453	      const_op = 0, op1 = const0_rtx;
10454	      code = GE;
10455	      break;
10456	    }
10457	  else
10458	    break;
10459
10460	case LEU:
10461	  /* unsigned <= 0 is equivalent to == 0 */
10462	  if (const_op == 0)
10463	    code = EQ;
10464
10465	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10466	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10467		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10468	    {
10469	      const_op = 0, op1 = const0_rtx;
10470	      code = GE;
10471	    }
10472	  break;
10473
10474	case GEU:
10475	  /* >= C is equivalent to > (C - 1).  */
10476	  if (const_op > 1)
10477	    {
10478	      const_op -= 1;
10479	      op1 = GEN_INT (const_op);
10480	      code = GTU;
10481	      /* ... fall through ...  */
10482	    }
10483
10484	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10485	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10486		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10487	    {
10488	      const_op = 0, op1 = const0_rtx;
10489	      code = LT;
10490	      break;
10491	    }
10492	  else
10493	    break;
10494
10495	case GTU:
10496	  /* unsigned > 0 is equivalent to != 0 */
10497	  if (const_op == 0)
10498	    code = NE;
10499
10500	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10501	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10502		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10503	    {
10504	      const_op = 0, op1 = const0_rtx;
10505	      code = LT;
10506	    }
10507	  break;
10508
10509	default:
10510	  break;
10511	}
10512
10513      /* Compute some predicates to simplify code below.  */
10514
10515      equality_comparison_p = (code == EQ || code == NE);
10516      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10517      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10518			       || code == GEU);
10519
10520      /* If this is a sign bit comparison and we can do arithmetic in
10521	 MODE, say that we will only be needing the sign bit of OP0.  */
10522      if (sign_bit_comparison_p
10523	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10524	op0 = force_to_mode (op0, mode,
10525			     ((HOST_WIDE_INT) 1
10526			      << (GET_MODE_BITSIZE (mode) - 1)),
10527			     NULL_RTX, 0);
10528
10529      /* Now try cases based on the opcode of OP0.  If none of the cases
10530	 does a "continue", we exit this loop immediately after the
10531	 switch.  */
10532
10533      switch (GET_CODE (op0))
10534	{
10535	case ZERO_EXTRACT:
10536	  /* If we are extracting a single bit from a variable position in
10537	     a constant that has only a single bit set and are comparing it
10538	     with zero, we can convert this into an equality comparison
10539	     between the position and the location of the single bit.  */
10540	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10541	     have already reduced the shift count modulo the word size.  */
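	  /* For example, (eq (zero_extract (const_int 4) (const_int 1) POS)
	     (const_int 0)) tests bit POS of the constant 4, whose only set
	     bit is bit 2, so it becomes (ne POS (const_int 2)) (with the
	     position adjusted when BITS_BIG_ENDIAN).  */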
10542	  if (!SHIFT_COUNT_TRUNCATED
10543	      && GET_CODE (XEXP (op0, 0)) == CONST_INT
10544	      && XEXP (op0, 1) == const1_rtx
10545	      && equality_comparison_p && const_op == 0
10546	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10547	    {
10548	      if (BITS_BIG_ENDIAN)
10549		{
10550		  enum machine_mode new_mode
10551		    = mode_for_extraction (EP_extzv, 1);
10552		  if (new_mode == MAX_MACHINE_MODE)
10553		    i = BITS_PER_WORD - 1 - i;
10554		  else
10555		    {
10556		      mode = new_mode;
10557		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10558		    }
10559		}
10560
10561	      op0 = XEXP (op0, 2);
10562	      op1 = GEN_INT (i);
10563	      const_op = i;
10564
10565	      /* Result is nonzero iff shift count is equal to I.  */
10566	      code = reverse_condition (code);
10567	      continue;
10568	    }
10569
10570	  /* ... fall through ...  */
10571
10572	case SIGN_EXTRACT:
10573	  tem = expand_compound_operation (op0);
10574	  if (tem != op0)
10575	    {
10576	      op0 = tem;
10577	      continue;
10578	    }
10579	  break;
10580
10581	case NOT:
10582	  /* If testing for equality, we can take the NOT of the constant.  */
10583	  if (equality_comparison_p
10584	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10585	    {
10586	      op0 = XEXP (op0, 0);
10587	      op1 = tem;
10588	      continue;
10589	    }
10590
10591	  /* If just looking at the sign bit, reverse the sense of the
10592	     comparison.  */
10593	  if (sign_bit_comparison_p)
10594	    {
10595	      op0 = XEXP (op0, 0);
10596	      code = (code == GE ? LT : GE);
10597	      continue;
10598	    }
10599	  break;
10600
10601	case NEG:
10602	  /* If testing for equality, we can take the NEG of the constant.  */
10603	  if (equality_comparison_p
10604	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10605	    {
10606	      op0 = XEXP (op0, 0);
10607	      op1 = tem;
10608	      continue;
10609	    }
10610
10611	  /* The remaining cases only apply to comparisons with zero.  */
10612	  if (const_op != 0)
10613	    break;
10614
10615	  /* When X is ABS or is known positive,
10616	     (neg X) is < 0 if and only if X != 0.  */
10617
10618	  if (sign_bit_comparison_p
10619	      && (GET_CODE (XEXP (op0, 0)) == ABS
10620		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10621		      && (nonzero_bits (XEXP (op0, 0), mode)
10622			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10623	    {
10624	      op0 = XEXP (op0, 0);
10625	      code = (code == LT ? NE : EQ);
10626	      continue;
10627	    }
10628
10629	  /* If we have NEG of something whose two high-order bits are the
10630	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10631	  if (num_sign_bit_copies (op0, mode) >= 2)
10632	    {
10633	      op0 = XEXP (op0, 0);
10634	      code = swap_condition (code);
10635	      continue;
10636	    }
10637	  break;
10638
10639	case ROTATE:
10640	  /* If we are testing equality and our count is a constant, we
10641	     can perform the inverse operation on our RHS.  */
10642	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10643	      && (tem = simplify_binary_operation (ROTATERT, mode,
10644						   op1, XEXP (op0, 1))) != 0)
10645	    {
10646	      op0 = XEXP (op0, 0);
10647	      op1 = tem;
10648	      continue;
10649	    }
10650
10651	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10652	     a particular bit.  Convert it to an AND of a constant of that
10653	     bit.  This will be converted into a ZERO_EXTRACT.  */
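	  /* E.g., in SImode, (lt (rotate X (const_int 3)) (const_int 0))
	     tests bit 28 of X, so it becomes
	     (ne (and X (const_int 0x10000000)) (const_int 0)).  */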
10654	  if (const_op == 0 && sign_bit_comparison_p
10655	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10656	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10657	    {
10658	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10659					    ((HOST_WIDE_INT) 1
10660					     << (mode_width - 1
10661						 - INTVAL (XEXP (op0, 1)))));
10662	      code = (code == LT ? NE : EQ);
10663	      continue;
10664	    }
10665
10666	  /* Fall through.  */
10667
10668	case ABS:
10669	  /* ABS is ignorable inside an equality comparison with zero.  */
10670	  if (const_op == 0 && equality_comparison_p)
10671	    {
10672	      op0 = XEXP (op0, 0);
10673	      continue;
10674	    }
10675	  break;
10676
10677	case SIGN_EXTEND:
10678	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10679	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10680	     are either testing inequality or have an unsigned comparison
10681	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
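	  /* For example, (lt (sign_extend:SI (reg:QI X)) (const_int 100))
	     can become (lt (reg:QI X) (const_int 100)), since 100 fits in
	     QImode.  */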
10682	  if (! unsigned_comparison_p
10683	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10684		  <= HOST_BITS_PER_WIDE_INT)
10685	      && ((unsigned HOST_WIDE_INT) const_op
10686		  < (((unsigned HOST_WIDE_INT) 1
10687		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10688	    {
10689	      op0 = XEXP (op0, 0);
10690	      continue;
10691	    }
10692	  break;
10693
10694	case SUBREG:
10695	  /* Check for the case where we are comparing A - C1 with C2,
10696	     both constants are smaller than 1/2 the maximum positive
10697	     value in MODE, and the comparison is equality or unsigned.
10698	     In that case, if A is either zero-extended to MODE or has
10699	     sufficient sign bits so that the high-order bit in MODE
10700	     is a copy of the sign in the inner mode, we can prove that it is
10701	     safe to do the operation in the wider mode.  This simplifies
10702	     many range checks.  */
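	  /* For instance, the QImode test
	     (ltu (subreg:QI (plus:SI A (const_int -10)) 0) (const_int 20)),
	     a typical encoding of the range check 10 <= A < 30, can be done
	     directly in SImode when A is known to fit in QImode.  */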
10703
10704	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10705	      && subreg_lowpart_p (op0)
10706	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10707	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10708	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10709	      && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10710		  < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10711	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10712	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10713				      GET_MODE (SUBREG_REG (op0)))
10714			& ~GET_MODE_MASK (mode))
10715		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10716					   GET_MODE (SUBREG_REG (op0)))
10717		      > (unsigned int)
10718			(GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10719			 - GET_MODE_BITSIZE (mode)))))
10720	    {
10721	      op0 = SUBREG_REG (op0);
10722	      continue;
10723	    }
10724
10725	  /* If the inner mode is narrower and we are extracting the low part,
10726	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10727	  if (subreg_lowpart_p (op0)
10728	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10729	    /* Fall through */ ;
10730	  else
10731	    break;
10732
10733	  /* ... fall through ...  */
10734
10735	case ZERO_EXTEND:
10736	  if ((unsigned_comparison_p || equality_comparison_p)
10737	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10738		  <= HOST_BITS_PER_WIDE_INT)
10739	      && ((unsigned HOST_WIDE_INT) const_op
10740		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10741	    {
10742	      op0 = XEXP (op0, 0);
10743	      continue;
10744	    }
10745	  break;
10746
10747	case PLUS:
10748	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10749	     this for equality comparisons due to pathological cases involving
10750	     overflows.  */
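	  /* E.g., (eq (plus X (const_int 3)) (const_int 7)) becomes
	     (eq X (const_int 4)).  */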
10751	  if (equality_comparison_p
10752	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10753							op1, XEXP (op0, 1))))
10754	    {
10755	      op0 = XEXP (op0, 0);
10756	      op1 = tem;
10757	      continue;
10758	    }
10759
10760	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10761	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10762	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10763	    {
10764	      op0 = XEXP (XEXP (op0, 0), 0);
10765	      code = (code == LT ? EQ : NE);
10766	      continue;
10767	    }
10768	  break;
10769
10770	case MINUS:
10771	  /* We used to optimize signed comparisons against zero, but that
10772	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10773	     arrive here as equality comparisons, or (GEU, LTU) are
10774	     optimized away.  No need to special-case them.  */
10775
10776	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10777	     (eq B (minus A C)), whichever simplifies.  We can only do
10778	     this for equality comparisons due to pathological cases involving
10779	     overflows.  */
10780	  if (equality_comparison_p
10781	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10782							XEXP (op0, 1), op1)))
10783	    {
10784	      op0 = XEXP (op0, 0);
10785	      op1 = tem;
10786	      continue;
10787	    }
10788
10789	  if (equality_comparison_p
10790	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10791							XEXP (op0, 0), op1)))
10792	    {
10793	      op0 = XEXP (op0, 1);
10794	      op1 = tem;
10795	      continue;
10796	    }
10797
10798	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10799	     of bits in X minus 1, is one iff X > 0.  */
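	  /* So in SImode, (lt (minus (ashiftrt X (const_int 31)) X)
	     (const_int 0)) becomes (gt X (const_int 0)).  */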
10800	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10801	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10802	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10803		 == mode_width - 1
10804	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10805	    {
10806	      op0 = XEXP (op0, 1);
10807	      code = (code == GE ? LE : GT);
10808	      continue;
10809	    }
10810	  break;
10811
10812	case XOR:
10813	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10814	     if C is zero or B is a constant.  */
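	  /* E.g., (eq (xor X (const_int 5)) (const_int 3)) becomes
	     (eq X (const_int 6)), since 5 ^ 3 == 6.  */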
10815	  if (equality_comparison_p
10816	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10817							XEXP (op0, 1), op1)))
10818	    {
10819	      op0 = XEXP (op0, 0);
10820	      op1 = tem;
10821	      continue;
10822	    }
10823	  break;
10824
10825	case EQ:  case NE:
10826	case UNEQ:  case LTGT:
10827	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10828	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10829        case UNORDERED: case ORDERED:
10830	  /* We can't do anything if OP0 is a condition code value, rather
10831	     than an actual data value.  */
10832	  if (const_op != 0
10833	      || CC0_P (XEXP (op0, 0))
10834	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10835	    break;
10836
10837	  /* Get the two operands being compared.  */
10838	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10839	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10840	  else
10841	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10842
10843	  /* Check for the cases where we simply want the result of the
10844	     earlier test or the opposite of that result.  */
10845	  if (code == NE || code == EQ
10846	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10847		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10848		  && (STORE_FLAG_VALUE
10849		      & (((HOST_WIDE_INT) 1
10850			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10851		  && (code == LT || code == GE)))
10852	    {
10853	      enum rtx_code new_code;
10854	      if (code == LT || code == NE)
10855		new_code = GET_CODE (op0);
10856	      else
10857		new_code = combine_reversed_comparison_code (op0);
10858
10859	      if (new_code != UNKNOWN)
10860		{
10861		  code = new_code;
10862		  op0 = tem;
10863		  op1 = tem1;
10864		  continue;
10865		}
10866	    }
10867	  break;
10868
10869	case IOR:
10870	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10871	     iff X <= 0.  */
10872	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10873	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10874	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10875	    {
10876	      op0 = XEXP (op0, 1);
10877	      code = (code == GE ? GT : LE);
10878	      continue;
10879	    }
10880	  break;
10881
10882	case AND:
10883	  /* Convert (and (ashift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10884	     will be converted to a ZERO_EXTRACT later.  */
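	  /* Both forms test bit X of Y; e.g.,
	     (ne (and (ashift (const_int 1) X) Y) (const_int 0)) becomes
	     (ne (and (lshiftrt Y X) (const_int 1)) (const_int 0)).  */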
10885	  if (const_op == 0 && equality_comparison_p
10886	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10887	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10888	    {
10889	      op0 = simplify_and_const_int
10890		(op0, mode, gen_rtx_LSHIFTRT (mode,
10891					      XEXP (op0, 1),
10892					      XEXP (XEXP (op0, 0), 1)),
10893		 (HOST_WIDE_INT) 1);
10894	      continue;
10895	    }
10896
10897	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10898	     zero and X is a comparison and C1 and C2 describe only bits set
10899	     in STORE_FLAG_VALUE, we can compare with X.  */
10900	  if (const_op == 0 && equality_comparison_p
10901	      && mode_width <= HOST_BITS_PER_WIDE_INT
10902	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10903	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10904	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10905	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10906	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10907	    {
10908	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10909		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10910	      if ((~STORE_FLAG_VALUE & mask) == 0
10911		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10912		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10913			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10914		{
10915		  op0 = XEXP (XEXP (op0, 0), 0);
10916		  continue;
10917		}
10918	    }
10919
10920	  /* If we are doing an equality comparison of an AND of a bit equal
10921	     to the sign bit, replace this with a LT or GE comparison of
10922	     the underlying value.  */
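	  /* E.g., in SImode, (eq (and X (const_int 0x80000000)) (const_int 0))
	     becomes (ge X (const_int 0)), and the NE form becomes
	     (lt X (const_int 0)).  */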
10923	  if (equality_comparison_p
10924	      && const_op == 0
10925	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10926	      && mode_width <= HOST_BITS_PER_WIDE_INT
10927	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10928		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10929	    {
10930	      op0 = XEXP (op0, 0);
10931	      code = (code == EQ ? GE : LT);
10932	      continue;
10933	    }
10934
10935	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10936	     mode, the constant fits within that mode, and this is either an
10937	     equality or unsigned comparison, try to do this comparison in
10938	     the narrower mode.  */
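	  /* E.g., (leu (and:SI X (const_int 255)) (const_int 14)) can be
	     done as a QImode comparison of the low part of X against 14.  */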
10939	  if ((equality_comparison_p || unsigned_comparison_p)
10940	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10941	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10942				   & GET_MODE_MASK (mode))
10943				  + 1)) >= 0
10944	      && const_op >> i == 0
10945	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10946	    {
10947	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10948	      continue;
10949	    }
10950
10951	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10952	     fits in both M1 and M2 and the SUBREG is either paradoxical
10953	     or represents the low part, permute the SUBREG and the AND
10954	     and try again.  */
10955	  if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10956	    {
10957	      unsigned HOST_WIDE_INT c1;
10958	      tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10959	      /* Require an integral mode, to avoid creating something like
10960		 (AND:SF ...).  */
10961	      if (SCALAR_INT_MODE_P (tmode)
10962		  /* It is unsafe to commute the AND into the SUBREG if the
10963		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10964		     not defined.  As originally written the upper bits
10965		     have a defined value due to the AND operation.
10966		     However, if we commute the AND inside the SUBREG then
10967		     they no longer have defined values and the meaning of
10968		     the code has been changed.  */
10969		  && (0
10970#ifdef WORD_REGISTER_OPERATIONS
10971		      || (mode_width > GET_MODE_BITSIZE (tmode)
10972			  && mode_width <= BITS_PER_WORD)
10973#endif
10974		      || (mode_width <= GET_MODE_BITSIZE (tmode)
10975			  && subreg_lowpart_p (XEXP (op0, 0))))
10976		  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10977		  && mode_width <= HOST_BITS_PER_WIDE_INT
10978		  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10979		  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10980		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
10981		  && c1 != mask
10982		  && c1 != GET_MODE_MASK (tmode))
10983		{
10984		  op0 = gen_binary (AND, tmode,
10985				    SUBREG_REG (XEXP (op0, 0)),
10986				    gen_int_mode (c1, tmode));
10987		  op0 = gen_lowpart_for_combine (mode, op0);
10988		  continue;
10989		}
10990	    }
10991
10992	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
10993	  if (const_op == 0 && equality_comparison_p
10994	      && XEXP (op0, 1) == const1_rtx
10995	      && GET_CODE (XEXP (op0, 0)) == NOT)
10996	    {
10997	      op0 = simplify_and_const_int
10998		(NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10999	      code = (code == NE ? EQ : NE);
11000	      continue;
11001	    }
11002
11003	  /* Convert (ne (and (lshiftrt (not X) C) 1) 0) to
11004	     (eq (and (lshiftrt X C) 1) 0).
11005	     Also handle the case where (not X) is expressed using xor.  */
11006	  if (const_op == 0 && equality_comparison_p
11007	      && XEXP (op0, 1) == const1_rtx
11008	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11009	    {
11010	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
11011	      rtx shift_count = XEXP (XEXP (op0, 0), 1);
11012
11013	      if (GET_CODE (shift_op) == NOT
11014		  || (GET_CODE (shift_op) == XOR
11015		      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
11016		      && GET_CODE (shift_count) == CONST_INT
11017		      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11018		      && (INTVAL (XEXP (shift_op, 1))
11019			  == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11020		{
11021		  op0 = simplify_and_const_int
11022		    (NULL_RTX, mode,
11023		     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11024		     (HOST_WIDE_INT) 1);
11025		  code = (code == NE ? EQ : NE);
11026		  continue;
11027		}
11028	    }
11029	  break;
11030
11031	case ASHIFT:
11032	  /* If we have (compare (ashift FOO N) (const_int C)) and
11033	     the high order N bits of FOO (N+1 if an inequality comparison)
11034	     are known to be zero, we can do this by comparing FOO with C
11035	     shifted right N bits so long as the low-order N bits of C are
11036	     zero.  */
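	  /* E.g., (eq (ashift X (const_int 2)) (const_int 20)) becomes
	     (eq X (const_int 5)) when the two high-order bits of X are
	     known to be zero, since 20 >> 2 == 5.  */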
11037	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11038	      && INTVAL (XEXP (op0, 1)) >= 0
11039	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11040		  < HOST_BITS_PER_WIDE_INT)
11041	      && ((const_op
11042		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11043	      && mode_width <= HOST_BITS_PER_WIDE_INT
11044	      && (nonzero_bits (XEXP (op0, 0), mode)
11045		  & ~(mask >> (INTVAL (XEXP (op0, 1))
11046			       + ! equality_comparison_p))) == 0)
11047	    {
11048	      /* We must perform a logical shift, not an arithmetic one,
11049		 as we want the top N bits of C to be zero.  */
11050	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11051
11052	      temp >>= INTVAL (XEXP (op0, 1));
11053	      op1 = gen_int_mode (temp, mode);
11054	      op0 = XEXP (op0, 0);
11055	      continue;
11056	    }
11057
11058	  /* If we are doing a sign bit comparison, it means we are testing
11059	     a particular bit.  Convert it to the appropriate AND.  */
11060	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
11061	      && mode_width <= HOST_BITS_PER_WIDE_INT)
11062	    {
11063	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11064					    ((HOST_WIDE_INT) 1
11065					     << (mode_width - 1
11066						 - INTVAL (XEXP (op0, 1)))));
11067	      code = (code == LT ? NE : EQ);
11068	      continue;
11069	    }
11070
11071	  /* If this is an equality comparison with zero and we are shifting
11072	     the low bit to the sign bit, we can convert this to an AND of the
11073	     low-order bit.  */
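	  /* E.g., in SImode, (eq (ashift X (const_int 31)) (const_int 0))
	     becomes (eq (and X (const_int 1)) (const_int 0)).  */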
11074	  if (const_op == 0 && equality_comparison_p
11075	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11076	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11077		 == mode_width - 1)
11078	    {
11079	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11080					    (HOST_WIDE_INT) 1);
11081	      continue;
11082	    }
11083	  break;
11084
11085	case ASHIFTRT:
11086	  /* If this is an equality comparison with zero, we can do this
11087	     as a logical shift, which might be much simpler.  */
11088	  if (equality_comparison_p && const_op == 0
11089	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
11090	    {
11091	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11092					  XEXP (op0, 0),
11093					  INTVAL (XEXP (op0, 1)));
11094	      continue;
11095	    }
11096
11097	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11098	     do the comparison in a narrower mode.  */
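	  /* A SImode sign extension from QImode appears here as
	     (ashiftrt (ashift X (const_int 24)) (const_int 24)); a signed
	     comparison of it against a constant that fits in QImode can be
	     done on the QImode low part of X instead.  */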
11099	  if (! unsigned_comparison_p
11100	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11101	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11102	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11103	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11104					 MODE_INT, 1)) != BLKmode
11105	      && (((unsigned HOST_WIDE_INT) const_op
11106		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11107		  <= GET_MODE_MASK (tmode)))
11108	    {
11109	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
11110	      continue;
11111	    }
11112
11113	  /* Likewise if OP0 is a PLUS of a sign extension with a
11114	     constant, which is usually represented with the PLUS
11115	     between the shifts.  */
11116	  if (! unsigned_comparison_p
11117	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11118	      && GET_CODE (XEXP (op0, 0)) == PLUS
11119	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11120	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11121	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11122	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11123					 MODE_INT, 1)) != BLKmode
11124	      && (((unsigned HOST_WIDE_INT) const_op
11125		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11126		  <= GET_MODE_MASK (tmode)))
11127	    {
11128	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11129	      rtx add_const = XEXP (XEXP (op0, 0), 1);
11130	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
11131					  XEXP (op0, 1));
11132
11133	      op0 = gen_binary (PLUS, tmode,
11134				gen_lowpart_for_combine (tmode, inner),
11135				new_const);
11136	      continue;
11137	    }
11138
11139	  /* ... fall through ...  */
11140	case LSHIFTRT:
11141	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11142	     the low order N bits of FOO are known to be zero, we can do this
11143	     by comparing FOO with C shifted left N bits so long as no
11144	     overflow occurs.  */
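	  /* E.g., (eq (lshiftrt X (const_int 2)) (const_int 5)) becomes
	     (eq X (const_int 20)) when the two low-order bits of X are
	     known to be zero.  */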
11145	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11146	      && INTVAL (XEXP (op0, 1)) >= 0
11147	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11148	      && mode_width <= HOST_BITS_PER_WIDE_INT
11149	      && (nonzero_bits (XEXP (op0, 0), mode)
11150		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11151	      && (((unsigned HOST_WIDE_INT) const_op
11152		   + (GET_CODE (op0) != LSHIFTRT
11153		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11154			 + 1)
11155		      : 0))
11156		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11157	    {
11158	      /* If the shift was logical, then we must make the condition
11159		 unsigned.  */
11160	      if (GET_CODE (op0) == LSHIFTRT)
11161		code = unsigned_condition (code);
11162
11163	      const_op <<= INTVAL (XEXP (op0, 1));
11164	      op1 = GEN_INT (const_op);
11165	      op0 = XEXP (op0, 0);
11166	      continue;
11167	    }
11168
11169	  /* If we are using this shift to extract just the sign bit, we
11170	     can replace this with an LT or GE comparison.  */
11171	  if (const_op == 0
11172	      && (equality_comparison_p || sign_bit_comparison_p)
11173	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11174	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11175		 == mode_width - 1)
11176	    {
11177	      op0 = XEXP (op0, 0);
11178	      code = (code == NE || code == GT ? LT : GE);
11179	      continue;
11180	    }
11181	  break;
11182
11183	default:
11184	  break;
11185	}
11186
11187      break;
11188    }
11189
11190  /* Now make any compound operations involved in this comparison.  Then,
11191     check for an outermost SUBREG on OP0 that is not doing anything or is
11192     paradoxical.  The latter transformation must only be performed when
11193     it is known that the "extra" bits will be the same in op0 and op1 or
11194     that they don't matter.  There are three cases to consider:
11195
11196     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11197     care bits and we can assume they have any convenient value.  So
11198     making the transformation is safe.
11199
11200     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11201     In this case the upper bits of op0 are undefined.  We should not make
11202     the simplification in that case as we do not know the contents of
11203     those bits.
11204
11205     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11206     NIL.  In that case we know those bits are zeros or ones.  We must
11207     also be sure that they are the same as the upper bits of op1.
11208
11209     We can never remove a SUBREG for a non-equality comparison because
11210     the sign bit is in a different place in the underlying object.  */
11211
11212  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11213  op1 = make_compound_operation (op1, SET);
11214
11215  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11216      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11217      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11218      && (code == NE || code == EQ))
11219    {
11220      if (GET_MODE_SIZE (GET_MODE (op0))
11221	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11222	{
11223	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
11224	     implemented.  */
11225          if (GET_CODE (SUBREG_REG (op0)) == REG)
11226	    {
11227	      op0 = SUBREG_REG (op0);
11228	      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11229	    }
11230	}
11231      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11232		<= HOST_BITS_PER_WIDE_INT)
11233	       && (nonzero_bits (SUBREG_REG (op0),
11234				 GET_MODE (SUBREG_REG (op0)))
11235		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11236	{
11237	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
11238
11239	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11240	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11241	    op0 = SUBREG_REG (op0), op1 = tem;
11242	}
11243    }
11244
11245  /* We now do the opposite procedure: Some machines don't have compare
11246     insns in all modes.  If OP0's mode is an integer mode smaller than a
11247     word and we can't do a compare in that mode, see if there is a larger
11248     mode for which we can do the compare.  There are a number of cases in
11249     which we can use the wider mode.  */
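  /* E.g., if the target lacks a HImode compare but has an SImode one,
     (eq (reg:HI X) (reg:HI Y)) can be tested in SImode when the high bits
     of both operands are known to be zero or to be copies of the sign
     bit.  */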
11250
11251  mode = GET_MODE (op0);
11252  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11253      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11254      && ! have_insn_for (COMPARE, mode))
11255    for (tmode = GET_MODE_WIDER_MODE (mode);
11256	 (tmode != VOIDmode
11257	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11258	 tmode = GET_MODE_WIDER_MODE (tmode))
11259      if (have_insn_for (COMPARE, tmode))
11260	{
11261	  int zero_extended;
11262
11263	  /* If the only nonzero bits in OP0 and OP1 are those in the
11264	     narrower mode and this is an equality or unsigned comparison,
11265	     we can use the wider mode.  Similarly for sign-extended
11266	     values, in which case it is true for all comparisons.  */
11267	  zero_extended = ((code == EQ || code == NE
11268			    || code == GEU || code == GTU
11269			    || code == LEU || code == LTU)
11270			   && (nonzero_bits (op0, tmode)
11271			       & ~GET_MODE_MASK (mode)) == 0
11272			   && ((GET_CODE (op1) == CONST_INT
11273				|| (nonzero_bits (op1, tmode)
11274				    & ~GET_MODE_MASK (mode)) == 0)));
11275
11276	  if (zero_extended
11277	      || ((num_sign_bit_copies (op0, tmode)
11278		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11279				     - GET_MODE_BITSIZE (mode)))
11280		  && (num_sign_bit_copies (op1, tmode)
11281		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11282					- GET_MODE_BITSIZE (mode)))))
11283	    {
11284	      /* If OP0 is an AND and we don't have an AND in MODE either,
11285		 make a new AND in the proper mode.  */
11286	      if (GET_CODE (op0) == AND
11287		  && !have_insn_for (AND, mode))
11288		op0 = gen_binary (AND, tmode,
11289				  gen_lowpart_for_combine (tmode,
11290							   XEXP (op0, 0)),
11291				  gen_lowpart_for_combine (tmode,
11292							   XEXP (op0, 1)));
11293
11294	      op0 = gen_lowpart_for_combine (tmode, op0);
11295	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11296		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11297	      op1 = gen_lowpart_for_combine (tmode, op1);
11298	      break;
11299	    }
11300
11301	  /* If this is a test for negative, we can make an explicit
11302	     test of the sign bit.  */
11303
11304	  if (op1 == const0_rtx && (code == LT || code == GE)
11305	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11306	    {
11307	      op0 = gen_binary (AND, tmode,
11308				gen_lowpart_for_combine (tmode, op0),
11309				GEN_INT ((HOST_WIDE_INT) 1
11310					 << (GET_MODE_BITSIZE (mode) - 1)));
11311	      code = (code == LT) ? NE : EQ;
11312	      break;
11313	    }
11314	}
11315
11316#ifdef CANONICALIZE_COMPARISON
11317  /* If this machine only supports a subset of valid comparisons, see if we
11318     can convert an unsupported one into a supported one.  */
11319  CANONICALIZE_COMPARISON (code, op0, op1);
11320#endif
11321
11322  *pop0 = op0;
11323  *pop1 = op1;
11324
11325  return code;
11326}
11327
11328/* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11329   searching backward.  */
11330static enum rtx_code
11331combine_reversed_comparison_code (rtx exp)
11332{
11333  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11334  rtx x;
11335
11336  if (code1 != UNKNOWN
11337      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11338    return code1;
11339  /* Otherwise try to find where the condition codes were last set and
11340     use that.  */
11341  x = get_last_value (XEXP (exp, 0));
11342  if (!x || GET_CODE (x) != COMPARE)
11343    return UNKNOWN;
11344  return reversed_comparison_code_parts (GET_CODE (exp),
11345					 XEXP (x, 0), XEXP (x, 1), NULL);
11346}
11347
11348/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11349   Return NULL_RTX in case we fail to do the reversal.  */
11350static rtx
11351reversed_comparison (rtx exp, enum machine_mode mode, rtx op0, rtx op1)
11352{
11353  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11354  if (reversed_code == UNKNOWN)
11355    return NULL_RTX;
11356  else
11357    return gen_binary (reversed_code, mode, op0, op1);
11358}
11359
11360/* Utility function for the following routine.  Called when X is part of a value
11361   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
11362   for each register mentioned.  Similar to mention_regs in cse.c  */
11363
11364static void
11365update_table_tick (rtx x)
11366{
11367  enum rtx_code code = GET_CODE (x);
11368  const char *fmt = GET_RTX_FORMAT (code);
11369  int i;
11370
11371  if (code == REG)
11372    {
11373      unsigned int regno = REGNO (x);
11374      unsigned int endregno
11375	= regno + (regno < FIRST_PSEUDO_REGISTER
11376		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11377      unsigned int r;
11378
11379      for (r = regno; r < endregno; r++)
11380	reg_last_set_table_tick[r] = label_tick;
11381
11382      return;
11383    }
11384
11385  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11386    /* Note that we can't have an "E" in values stored; see
11387       get_last_value_validate.  */
11388    if (fmt[i] == 'e')
11389      {
11390	/* Check for identical subexpressions.  If x contains
11391	   identical subexpression we only have to traverse one of
11392	   them.  */
11393	if (i == 0
11394	    && (GET_RTX_CLASS (code) == '2'
11395		|| GET_RTX_CLASS (code) == 'c'))
11396	  {
11397	    /* Note that at this point x1 has already been
11398	       processed.  */
11399	    rtx x0 = XEXP (x, 0);
11400	    rtx x1 = XEXP (x, 1);
11401
11402	    /* If x0 and x1 are identical then there is no need to
11403	       process x0.  */
11404	    if (x0 == x1)
11405	      break;
11406
11407	    /* If x0 is identical to a subexpression of x1 then while
11408	       processing x1, x0 has already been processed.  Thus we
11409	       are done with x.  */
11410	    if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11411		 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11412		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11413	      break;
11414
11415	    /* If x1 is identical to a subexpression of x0 then we
11416	       still have to process the rest of x0.  */
11417	    if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11418		 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11419		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11420	      {
11421		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11422		break;
11423	      }
11424	  }
11425
11426	update_table_tick (XEXP (x, i));
11427      }
11428}
11429
11430/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11431   are saying that the register is clobbered and we no longer know its
11432   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11433   with VALUE also zero and is used to invalidate the register.  */
11434
11435static void
11436record_value_for_reg (rtx reg, rtx insn, rtx value)
11437{
11438  unsigned int regno = REGNO (reg);
11439  unsigned int endregno
11440    = regno + (regno < FIRST_PSEUDO_REGISTER
11441	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11442  unsigned int i;
11443
11444  /* If VALUE contains REG and we have a previous value for REG, substitute
11445     the previous value.  */
11446  if (value && insn && reg_overlap_mentioned_p (reg, value))
11447    {
11448      rtx tem;
11449
11450      /* Set things up so get_last_value is allowed to see anything set up to
11451	 our insn.  */
11452      subst_low_cuid = INSN_CUID (insn);
11453      tem = get_last_value (reg);
11454
11455      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11456	 it isn't going to be useful and will take a lot of time to process,
11457	 so just use the CLOBBER.  */
11458
11459      if (tem)
11460	{
11461	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11462	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11463	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11464	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11465	    tem = XEXP (tem, 0);
11466
11467	  value = replace_rtx (copy_rtx (value), reg, tem);
11468	}
11469    }
11470
11471  /* For each register modified, show we don't know its value, that
11472     we don't know about its bitwise content, that its value has been
11473     updated, and that we don't know the location of the death of the
11474     register.  */
11475  for (i = regno; i < endregno; i++)
11476    {
11477      if (insn)
11478	reg_last_set[i] = insn;
11479
11480      reg_last_set_value[i] = 0;
11481      reg_last_set_mode[i] = 0;
11482      reg_last_set_nonzero_bits[i] = 0;
11483      reg_last_set_sign_bit_copies[i] = 0;
11484      reg_last_death[i] = 0;
11485    }
11486
11487  /* Mark registers that are being referenced in this value.  */
11488  if (value)
11489    update_table_tick (value);
11490
11491  /* Now update the status of each register being set.
11492     If someone is using this register in this block, set this register
11493     to invalid since we will get confused between the two lives in this
11494     basic block.  This makes any later use of this register's recorded
11495     value invalid.  In cse, we scan the table to invalidate all entries
11496     using this register, but this is too much work for us.  */
11497
11498  for (i = regno; i < endregno; i++)
11499    {
11500      reg_last_set_label[i] = label_tick;
11501      if (value && reg_last_set_table_tick[i] == label_tick)
11502	reg_last_set_invalid[i] = 1;
11503      else
11504	reg_last_set_invalid[i] = 0;
11505    }
11506
11507  /* The value being assigned might refer to X (like in "x++;").  In that
11508     case, we must replace it with (clobber (const_int 0)) to prevent
11509     infinite loops.  */
11510  if (value && ! get_last_value_validate (&value, insn,
11511					  reg_last_set_label[regno], 0))
11512    {
11513      value = copy_rtx (value);
11514      if (! get_last_value_validate (&value, insn,
11515				     reg_last_set_label[regno], 1))
11516	value = 0;
11517    }
11518
11519  /* For the main register being modified, update the value, the mode, the
11520     nonzero bits, and the number of sign bit copies.  */
11521
11522  reg_last_set_value[regno] = value;
11523
11524  if (value)
11525    {
11526      enum machine_mode mode = GET_MODE (reg);
11527      subst_low_cuid = INSN_CUID (insn);
11528      reg_last_set_mode[regno] = mode;
11529      if (GET_MODE_CLASS (mode) == MODE_INT
11530	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11531	mode = nonzero_bits_mode;
11532      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11533      reg_last_set_sign_bit_copies[regno]
11534	= num_sign_bit_copies (value, GET_MODE (reg));
11535    }
11536}
11537
11538/* Called via note_stores from record_dead_and_set_regs to handle one
11539   SET or CLOBBER in an insn.  DATA is the instruction in which the
11540   set is occurring.  */
11541
11542static void
11543record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11544{
11545  rtx record_dead_insn = (rtx) data;
11546
11547  if (GET_CODE (dest) == SUBREG)
11548    dest = SUBREG_REG (dest);
11549
11550  if (GET_CODE (dest) == REG)
11551    {
11552      /* If we are setting the whole register, we know its value.  Otherwise
11553	 show that we don't know the value.  We can handle SUBREG in
11554	 some cases.  */
11555      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11556	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11557      else if (GET_CODE (setter) == SET
11558	       && GET_CODE (SET_DEST (setter)) == SUBREG
11559	       && SUBREG_REG (SET_DEST (setter)) == dest
11560	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11561	       && subreg_lowpart_p (SET_DEST (setter)))
11562	record_value_for_reg (dest, record_dead_insn,
11563			      gen_lowpart_for_combine (GET_MODE (dest),
11564						       SET_SRC (setter)));
11565      else
11566	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11567    }
11568  else if (GET_CODE (dest) == MEM
11569	   /* Ignore pushes, they clobber nothing.  */
11570	   && ! push_operand (dest, GET_MODE (dest)))
11571    mem_last_set = INSN_CUID (record_dead_insn);
11572}
11573
11574/* Update the records of when each REG was most recently set or killed
11575   for the things done by INSN.  This is the last thing done in processing
11576   INSN in the combiner loop.
11577
11578   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11579   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11580   and also the similar information mem_last_set (which insn most recently
11581   modified memory) and last_call_cuid (which insn was the most recent
11582   subroutine call).  */
11583
11584static void
11585record_dead_and_set_regs (rtx insn)
11586{
11587  rtx link;
11588  unsigned int i;
11589
11590  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11591    {
11592      if (REG_NOTE_KIND (link) == REG_DEAD
11593	  && GET_CODE (XEXP (link, 0)) == REG)
11594	{
11595	  unsigned int regno = REGNO (XEXP (link, 0));
11596	  unsigned int endregno
11597	    = regno + (regno < FIRST_PSEUDO_REGISTER
11598		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11599		       : 1);
11600
11601	  for (i = regno; i < endregno; i++)
11602	    reg_last_death[i] = insn;
11603	}
11604      else if (REG_NOTE_KIND (link) == REG_INC)
11605	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11606    }
11607
11608  if (GET_CODE (insn) == CALL_INSN)
11609    {
11610      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11611	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11612	  {
11613	    reg_last_set_value[i] = 0;
11614	    reg_last_set_mode[i] = 0;
11615	    reg_last_set_nonzero_bits[i] = 0;
11616	    reg_last_set_sign_bit_copies[i] = 0;
11617	    reg_last_death[i] = 0;
11618	  }
11619
11620      last_call_cuid = mem_last_set = INSN_CUID (insn);
11621
11622      /* Don't bother recording what this insn does.  It might set the
11623	 return value register, but we can't combine into a call
11624	 pattern anyway, so there's no point trying (and it may cause
11625	 a crash, if e.g. we wind up asking for last_set_value of a
11626	 SUBREG of the return value register).  */
11627      return;
11628    }
11629
11630  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11631}
11632
11633/* If a SUBREG has the promoted bit set, it is in fact a property of the
11634   register present in the SUBREG, so for each such SUBREG go back and
11635   adjust nonzero and sign bit information of the registers that are
11636   known to have some zero/sign bits set.
11637
11638   This is needed because when combine blows the SUBREGs away, the
11639   information on zero/sign bits is lost and further combines can be
11640   missed because of that.  */
11641
11642static void
11643record_promoted_value (rtx insn, rtx subreg)
11644{
11645  rtx links, set;
11646  unsigned int regno = REGNO (SUBREG_REG (subreg));
11647  enum machine_mode mode = GET_MODE (subreg);
11648
11649  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11650    return;
11651
11652  for (links = LOG_LINKS (insn); links;)
11653    {
11654      insn = XEXP (links, 0);
11655      set = single_set (insn);
11656
11657      if (! set || GET_CODE (SET_DEST (set)) != REG
11658	  || REGNO (SET_DEST (set)) != regno
11659	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11660	{
11661	  links = XEXP (links, 1);
11662	  continue;
11663	}
11664
11665      if (reg_last_set[regno] == insn)
11666	{
11667	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11668	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11669	}
11670
11671      if (GET_CODE (SET_SRC (set)) == REG)
11672	{
11673	  regno = REGNO (SET_SRC (set));
11674	  links = LOG_LINKS (insn);
11675	}
11676      else
11677	break;
11678    }
11679}
11680
11681/* Scan X for promoted SUBREGs.  For each one found,
11682   note what it implies to the registers used in it.  */
11683
11684static void
11685check_promoted_subreg (rtx insn, rtx x)
11686{
11687  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11688      && GET_CODE (SUBREG_REG (x)) == REG)
11689    record_promoted_value (insn, x);
11690  else
11691    {
11692      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11693      int i, j;
11694
11695      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11696	switch (format[i])
11697	  {
11698	  case 'e':
11699	    check_promoted_subreg (insn, XEXP (x, i));
11700	    break;
11701	  case 'V':
11702	  case 'E':
11703	    if (XVEC (x, i) != 0)
11704	      for (j = 0; j < XVECLEN (x, i); j++)
11705		check_promoted_subreg (insn, XVECEXP (x, i, j));
11706	    break;
11707	  }
11708    }
11709}
11710
11711/* Utility routine for the following function.  Verify that all the registers
11712   mentioned in *LOC are valid when *LOC was part of a value set when
11713   label_tick == TICK.  Return 0 if some are not.
11714
11715   If REPLACE is nonzero, replace the invalid reference with
11716   (clobber (const_int 0)) and return 1.  This replacement is useful because
11717   we often can get useful information about the form of a value (e.g., if
11718   it was produced by a shift that always produces -1 or 0) even though
11719   we don't know exactly what registers it was produced from.  */
11720
11721static int
11722get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11723{
11724  rtx x = *loc;
11725  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11726  int len = GET_RTX_LENGTH (GET_CODE (x));
11727  int i;
11728
11729  if (GET_CODE (x) == REG)
11730    {
11731      unsigned int regno = REGNO (x);
11732      unsigned int endregno
11733	= regno + (regno < FIRST_PSEUDO_REGISTER
11734		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11735      unsigned int j;
11736
11737      for (j = regno; j < endregno; j++)
11738	if (reg_last_set_invalid[j]
11739	    /* If this is a pseudo-register that was only set once and not
11740	       live at the beginning of the function, it is always valid.  */
11741	    || (! (regno >= FIRST_PSEUDO_REGISTER
11742		   && REG_N_SETS (regno) == 1
11743		   && (! REGNO_REG_SET_P
11744		       (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11745		&& reg_last_set_label[j] > tick))
11746	  {
11747	    if (replace)
11748	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11749	    return replace;
11750	  }
11751
11752      return 1;
11753    }
11754  /* If this is a memory reference, make sure that there were
11755     no stores after it that might have clobbered the value.  We don't
11756     have alias info, so we assume any store invalidates it.  */
11757  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11758	   && INSN_CUID (insn) <= mem_last_set)
11759    {
11760      if (replace)
11761	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11762      return replace;
11763    }
11764
11765  for (i = 0; i < len; i++)
11766    {
11767      if (fmt[i] == 'e')
11768	{
11769	  /* Check for identical subexpressions.  If x contains
11770	     identical subexpression we only have to traverse one of
11771	     them.  */
11772	  if (i == 1
11773	      && (GET_RTX_CLASS (GET_CODE (x)) == '2'
11774		  || GET_RTX_CLASS (GET_CODE (x)) == 'c'))
11775	    {
11776	      /* Note that at this point x0 has already been checked
11777		 and found valid.  */
11778	      rtx x0 = XEXP (x, 0);
11779	      rtx x1 = XEXP (x, 1);
11780
11781	      /* If x0 and x1 are identical then x is also valid.  */
11782	      if (x0 == x1)
11783		return 1;
11784
11785	      /* If x1 is identical to a subexpression of x0 then
11786		 while checking x0, x1 has already been checked.  Thus
11787		 it is valid and so is x.  */
11788	      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11789		   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11790		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11791		return 1;
11792
11793	      /* If x0 is identical to a subexpression of x1 then x is
11794		 valid iff the rest of x1 is valid.  */
11795	      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11796		   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11797		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11798		return
11799		  get_last_value_validate (&XEXP (x1,
11800						  x0 == XEXP (x1, 0) ? 1 : 0),
11801					   insn, tick, replace);
11802	    }
11803
11804	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11805				       replace) == 0)
11806	    return 0;
11807	}
11808      /* Don't bother with these.  They shouldn't occur anyway.  */
11809      else if (fmt[i] == 'E')
11810	return 0;
11811    }
11812
11813  /* If we haven't found a reason for it to be invalid, it is valid.  */
11814  return 1;
11815}
11816
11817/* Get the last value assigned to X, if known.  Some registers
11818   in the value may be replaced with (clobber (const_int 0)) if their value
11819   is no longer known reliably.  */
11820
11821static rtx
11822get_last_value (rtx x)
11823{
11824  unsigned int regno;
11825  rtx value;
11826
11827  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11828     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11829     we cannot predict what values the "extra" bits might have.  */
11830  if (GET_CODE (x) == SUBREG
11831      && subreg_lowpart_p (x)
11832      && (GET_MODE_SIZE (GET_MODE (x))
11833	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11834      && (value = get_last_value (SUBREG_REG (x))) != 0)
11835    return gen_lowpart_for_combine (GET_MODE (x), value);
11836
11837  if (GET_CODE (x) != REG)
11838    return 0;
11839
11840  regno = REGNO (x);
11841  value = reg_last_set_value[regno];
11842
11843  /* If we don't have a value, or if it isn't for this basic block and
11844     it's either a hard register, set more than once, or live
11845     at the beginning of the function, return 0.
11846
11847     The reasoning: if it's not live at the beginning of the function, the
11848     reg is always set before being used (it is never used without being
11849     set).  And if it's set only once and always set before use, then all
11850     uses must have the same last value, even if it's not from this basic
11851     block.  */

  if (value == 0
      || (reg_last_set_label[regno] != label_tick
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || (REGNO_REG_SET_P
                  (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set[regno],
                               reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set[regno],
                               reg_last_set_label[regno], 1))
    return value;

  return 0;
}
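
/* For example (register and insn numbers hypothetical): if the last
   set of pseudo 100 was (set (reg:SI 100) (const_int 7)) and nothing
   has invalidated it since, get_last_value of (subreg:QI (reg:SI 100) 0)
   fetches the SImode value and returns its QImode lowpart.  For a
   paradoxical SUBREG we return 0 instead, since the extra bits are
   unpredictable.  */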

/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (rtx x, int from_cuid)
{
  const char *fmt;
  int i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
                                 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
        return 1;
#endif
      for (; regno < endreg; regno++)
        if (reg_last_set[regno]
            && INSN_CUID (reg_last_set[regno]) > from_cuid)
          return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_cuid))
        return 1;
    }
  return 0;
}
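
/* For example (cuids hypothetical): if reg 100 was last set by the insn
   with cuid 30, then use_crosses_set_p on (reg:SI 100) returns 1 for
   FROM_CUID 25, since moving the use back to cuid 25 would cross that
   set and could change the value seen, but returns 0 for FROM_CUID 35.  */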

/* Define three variables used for communication between the following
   routines.  */

static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}
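
/* The test above treats both register ranges as half-open intervals
   [regno, endregno).  For instance, a DEST occupying hard regs 4 and 5
   (the interval [4, 6)) overlaps a tracked range [5, 7), so the flag is
   set, whereas a tracked range [6, 8) is disjoint from [4, 6) and such
   a store is ignored.  */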

/* Return nonzero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (rtx reg, rtx insn)
{
  basic_block block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
                                        ? HARD_REGNO_NREGS (reg_dead_regno,
                                                            GET_MODE (reg))
                                        : 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
        if (TEST_HARD_REG_BIT (newpat_used_regs, i))
          return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
      if (reg_dead_flag)
        return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
        return 1;
    }

  /* Get the basic block that we were in.  */
  if (insn == 0)
    block = ENTRY_BLOCK_PTR->next_bb;
  else
    {
      FOR_EACH_BB (block)
        if (insn == BB_HEAD (block))
          break;

      if (block == EXIT_BLOCK_PTR)
        return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (block->global_live_at_start, i))
      return 0;

  return 1;
}
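
/* A sketch of the scan above (insn numbers hypothetical): asked whether
   (reg:SI 2) is dead at insn 50, we visit insns 49, 48, ... and stop at
   the first one that sets, clobbers, or notes the death of reg 2; a
   CLOBBER or a REG_DEAD note means dead, a SET means live.  Only if we
   fall off the start of the block do we consult global_live_at_start.  */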

/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int endregno, r;

          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
          for (r = regno; r < endregno; r++)
            SET_HARD_REG_BIT (newpat_used_regs, r);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == SIGN_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (GET_CODE (testreg) == MEM)
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}
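
/* For instance, on a hypothetical 32-bit target where DImode occupies
   two hard registers, marking (reg:DI 4) sets bits 4 and 5 of
   newpat_used_regs, while the stack and frame pointers (and on some
   targets the arg pointer) are deliberately skipped by the early
   return above.  */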

/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
             rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = reg_last_death[regno];
      rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
          && ! reg_referenced_p (x, maybe_kill_insn))
        return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
         make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
        before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
        after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
          && INSN_CUID (before_dead) >= from_cuid
          && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
              || (where_dead != after_dead
                  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
        {
          rtx note = remove_death (regno, where_dead);

          /* It is possible for the call above to return 0.  This can occur
             when reg_last_death points to I2 or I1 that we combined with.
             In that case make a new note.

             We must also check for the case where X is a hard register
             and NOTE is a death note for a range of hard registers
             including X.  In that case, we must put REG_DEAD notes for
             the remaining registers in place of NOTE.  */

          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                  > GET_MODE_SIZE (GET_MODE (x))))
            {
              unsigned int deadregno = REGNO (XEXP (note, 0));
              unsigned int deadend
                = (deadregno + HARD_REGNO_NREGS (deadregno,
                                                 GET_MODE (XEXP (note, 0))));
              unsigned int ourend
                = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
              unsigned int i;

              for (i = deadregno; i < deadend; i++)
                if (i < regno || i >= ourend)
                  REG_NOTES (where_dead)
                    = gen_rtx_EXPR_LIST (REG_DEAD,
                                         regno_reg_rtx[i],
                                         REG_NOTES (where_dead));
            }

          /* If we didn't find any note, or if we found a REG_DEAD note that
             covers only part of the given reg, and we have a multi-reg hard
             register, then to be safe we must check for REG_DEAD notes
             for each register other than the first.  They could have
             their own REG_DEAD notes lying around.  */
          else if ((note == 0
                    || (note != 0
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                            < GET_MODE_SIZE (GET_MODE (x)))))
                   && regno < FIRST_PSEUDO_REGISTER
                   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
            {
              unsigned int ourend
                = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
              unsigned int i, offset;
              rtx oldnotes = 0;

              if (note)
                offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
              else
                offset = 1;

              for (i = regno + offset; i < ourend; i++)
                move_deaths (regno_reg_rtx[i],
                             maybe_kill_insn, from_cuid, to_insn, &oldnotes);
            }

          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
            {
              XEXP (note, 1) = *pnotes;
              *pnotes = note;
            }
          else
            *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

          REG_N_DEATHS (regno)++;
        }

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some
         piece of every register in the expression is used by
         this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
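
/* As an example of the multi-word handling above (hard regs
   hypothetical): if X is (reg:SI 4) but the old note was a REG_DEAD for
   (reg:DI 4), covering hard regs 4 and 5, the note is wider than the
   use being moved, so a fresh REG_DEAD note for reg 5 is left at the
   old death site and only the death of reg 4 migrates toward TO_INSN.  */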

/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}
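
/* For example, given the hypothetical pattern

	(set (zero_extract:SI (reg:SI 65) (const_int 8) (const_int 0))
	     (reg:SI 66))

   reg_bitfield_target_p returns 1 for (reg:SI 65): the assignment
   writes only part of reg 65, so the rest of its value still matters
   and a death note for it cannot simply be dropped as if the whole
   register had been replaced.  */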

/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
         the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
          && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
        XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_VTABLE_REF:
          /* ??? Should remain with *a particular* memory load.  Given the
             nature of vtable data, the last insn seems relatively safe.  */
          place = i3;
          break;

        case REG_NON_LOCAL_GOTO:
          if (GET_CODE (i3) == JUMP_INSN)
            place = i3;
          else if (i2 && GET_CODE (i2) == JUMP_INSN)
            place = i2;
          else
            abort ();
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (GET_CODE (i3) == CALL_INSN)
            place = i3;
          else if (i2 && GET_CODE (i2) == CALL_INSN)
            place = i2;
          else if (flag_non_call_exceptions)
            {
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          else
            abort ();
          break;

        case REG_ALWAYS_RETURN:
        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (GET_CODE (i3) == CALL_INSN)
            place = i3;
          else if (i2 && GET_CODE (i2) == CALL_INSN)
            place = i2;
          else
            abort ();
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (GET_CODE (XEXP (note, 0)) == REG
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (GET_CODE (XEXP (note, 0)) == REG
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
        case REG_NO_CONFLICT:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* Don't attach REG_LABEL note to a JUMP_INSN which has
             JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
          if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
            {
              if (JUMP_LABEL (place) != XEXP (note, 0))
                abort ();
              if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
                LABEL_NUSES (JUMP_LABEL (place))--;
              place = 0;
            }
          if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
            {
              if (JUMP_LABEL (place2) != XEXP (note, 0))
                abort ();
              if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
                LABEL_NUSES (JUMP_LABEL (place2))--;
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_RETVAL:
          /* If the insn previously containing this note still exists,
             put it back where it was.  Otherwise move it to the previous
             insn.  Adjust the corresponding REG_LIBCALL note.  */
          if (GET_CODE (from_insn) != NOTE)
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              place = prev_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_RETVAL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_LIBCALL:
          /* This is handled similarly to REG_RETVAL.  */
          if (GET_CODE (from_insn) != NOTE)
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
              place = next_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_LIBCALL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_DEAD:
          /* If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && GET_CODE (from_insn) == CALL_INSN
              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
            place = from_insn;
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;
          else if (i2 != 0 && next_nonnote_insn (i2) == i3
                   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
            place = i2;

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
                {
                  if (! INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          PUT_CODE (tem, NOTE);
                          NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
                          NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              PUT_CODE (cc0_setter, NOTE);
                              NOTE_LINE_NUMBER (cc0_setter)
                                = NOTE_INSN_DELETED;
                              NOTE_SOURCE_FILE (cc0_setter) = 0;
                            }
#endif
                        }
                      /* If the register is both set and used here, put the
                         REG_DEAD note here, but place a REG_UNUSED note
                         here too unless there already is one.  */
                      else if (reg_referenced_p (XEXP (note, 0),
                                                 PATTERN (tem)))
                        {
                          place = tem;

                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            REG_NOTES (tem)
                              = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
                                                   REG_NOTES (tem));
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (GET_CODE (tem) == CALL_INSN
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && INSN_UID (place) <= max_uid_cuid
                          && INSN_CUID (place) > INSN_CUID (i2)
                          && from_insn
                          && INSN_CUID (from_insn) > INSN_CUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

              /* We haven't found an insn for the death note and it
                 is still a REG_DEAD note, but we have hit the beginning
                 of the block.  If the existing life info says the reg
                 was dead, there's nothing left to do.  Otherwise, we'll
                 need to do a global life update after combine.  */
              if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
                  && REGNO_REG_SET_P (bb->global_live_at_start,
                                      REGNO (XEXP (note, 0))))
                SET_BIT (refresh_blocks, this_basic_block->index);
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here whether it is set at all, not whether it is
             totally replaced, which is what `dead_or_set_p' checks, so
             also check for it being set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));

              /* Similarly, if the instruction on which we want to place
                 the note is a noop, we'll need to do a global live update
                 after the noop moves are removed in delete_noop_moves.  */
              if (noop_move_p (place))
                SET_BIT (refresh_blocks, this_basic_block->index);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     reg_last_death.  [I no longer understand why this is
                     being done.] */
                  if (reg_last_death[regno] != place)
                    reg_last_death[regno] = 0;
                  place = 0;
                }
              else
                reg_last_death[regno] = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
                {
                  unsigned int endregno
                    = regno + HARD_REGNO_NREGS (regno,
                                                GET_MODE (XEXP (note, 0)));
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note
                                = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (! INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      {
                                        SET_BIT (refresh_blocks,
                                                 this_basic_block->index);
                                        break;
                                      }
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    REG_NOTES (tem)
                                      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
                                                           REG_NOTES (tem));
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}
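
/* To illustrate the REG_DEAD case above (insn numbers hypothetical): if
   combine merges insn 10, (set (reg 100) (reg 99)), into insn 12, a
   REG_DEAD note for reg 99 that used to sit on insn 10 is re-homed
   here: it lands on insn 12 if reg 99 is still used there, and
   otherwise we search backward for the previous use, possibly deleting
   a set of reg 99 that has become entirely dead along the way.  */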

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way that could happen is if
         we replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
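
/* Concretely (insn numbers hypothetical): if insn 10 sets reg 100 and
   insn 12 used it through a link 12->10, but combine rewrote things so
   that insn 11 is now the first user, the loop above starts at
   NEXT_INSN of insn 10, finds the use in insn 11, and re-attaches the
   link there, recording added_links_insn so that a later rescan starts
   early enough.  */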

/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (rtx insn)
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
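
/* Insns made by combine, such as the USEs it adds, have UIDs above
   max_uid_cuid and thus no cuid of their own; the loop above skips
   forward past them to the first insn that existed when the cuid table
   was built and uses that insn's cuid.  */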

void
dump_combine_stats (FILE *file)
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
