/* combine.c revision 146895 */
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0; none are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
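
/* An illustrative sketch of the substitution described above (the
   register numbers here are made up for the example): given the pair

	 (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	 (set (mem:SI (reg:SI 100)) (const_int 0))

   where the second insn has a LOG_LINKS entry for the first, combine
   substitutes the known value of (reg:SI 100) into the second insn,
   giving

	 (set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   If the machine description recognizes the result, it is installed
   and the first insn is deleted (assuming reg 100 is not needed
   elsewhere).  */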

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "params.h"

#ifndef SHIFT_COUNT_TRUNCATED
#define SHIFT_COUNT_TRUNCATED 0
#endif
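
/* As documented for this macro, a nonzero SHIFT_COUNT_TRUNCATED means
   the machine truncates shift counts to the width of the shifted mode;
   for example, a count of 33 applied to an SImode value on a 32-bit
   target acts like a count of 1.  When it is zero, nothing may be
   assumed about out-of-range shift counts.  */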

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;


/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
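
/* To see the problem being worked around: if both BITS_PER_WORD and
   HOST_BITS_PER_WIDE_INT are 64, then "(val) << 64" is undefined
   behavior in C, whereas the two-step shift above is well defined and
   produces the zero that is wanted here.  */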

#define nonzero_bits(X, M) \
  cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0)

#define num_sign_bit_copies(X, M) \
  cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0)

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
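
/* A made-up example of the redundancy test described above: if
   reg_last_set_value for pseudo 100 is a still-valid entry recording

	 (and:SI (reg:SI 99) (const_int 255))

   then a later (and:SI (reg:SI 100) (const_int 255)) is redundant,
   since the recorded value shows that all bits above the low byte of
   reg 100 are already zero.  */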

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set nonzero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while they are being computed and after combine has completed.
   Keeping it zero during the computation prevents propagating values based
   on previously set values, which can be incorrect if a variable is
   modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone.  The changes currently
   recorded are chained on the undos list below; entries that have been
   undone and can be recycled are chained on the frees list.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last_arrays (void);
static void setup_incoming_promotions (void);
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx, int);
static rtx expand_compound_operation (rtx);
static rtx expand_field_assignment (rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
			    rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx force_to_mode (rtx, enum machine_mode,
			  unsigned HOST_WIDE_INT, rtx, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
				   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
						   rtx, enum machine_mode,
						   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
					     enum machine_mode,
					     unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
						enum machine_mode,
						unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
					  enum machine_mode, unsigned int);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			    HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static rtx gen_binary (enum rtx_code, enum machine_mode, rtx, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (rtx);
static int use_crosses_set_p (rtx, int);
static void reg_dead_at_p_1 (rtx, rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static int insn_cuid (rtx);
static void record_promoted_value (rtx, rtx);
static rtx reversed_comparison (rtx, enum machine_mode, rtx, rtx);
static enum rtx_code combine_reversed_comparison_code (rtx);

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
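
/* A typical use of the macro above (a sketch only; X and NEWVAL stand
   for whatever rtl the caller has in hand) is

	 SUBST (XEXP (x, 0), newval);

   which records the old contents of XEXP (x, 0) in the undo buffer
   before storing NEWVAL there, so undo_all can restore the original
   rtl if the combination is abandoned.  */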

/* Similar to SUBST, but NEWVAL is an int expression.  Note that this
   cannot safely substitute a full HOST_WIDE_INT value (including the
   contents of a CONST_INT), since only an int is recorded.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT));
  reg_sign_bit_copies = xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = xmalloc (nregs * sizeof (rtx));
  reg_last_set = xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = xmalloc (nregs * sizeof (int));
  reg_last_set_label = xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = xmalloc (nregs * sizeof (char));
  reg_last_set_mode = xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits = xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies = xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also record any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays (void)
{
  unsigned int nregs = combine_max_regno;

  memset (reg_last_death, 0, nregs * sizeof (rtx));
  memset (reg_last_set, 0, nregs * sizeof (rtx));
  memset (reg_last_set_value, 0, nregs * sizeof (rtx));
  memset (reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset (reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset (reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset (reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	/* Check whether this register can hold an incoming pointer
	   argument.  FUNCTION_ARG_REGNO_P tests outgoing register
	   numbers, so translate if necessary due to register windows.  */
	if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	    && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
	  {
	    record_value_for_reg
	      (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
					   : SIGN_EXTEND),
					  GET_MODE (reg),
					  gen_rtx_CLOBBER (mode, const0_rtx)));
	  }
    }
}
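
/* Note the shape of the value recorded above: for a promoted argument
   register it is, e.g.,

	 (sign_extend:DI (clobber:SI (const_int 0)))

   (or ZERO_EXTEND; the modes shown are only illustrative).  The inner
   CLOBBER stands for "some unknown value"; all that is being recorded
   is the fact that the register holds a properly extended value.  */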

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
				  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is live at the start of the function, its initial
	 contents are undefined, so we can't say what its value was.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
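
/* For instance (an illustrative sketch; the register numbers and the
   32-bit word size are made up): after scanning

	 (set (reg:SI 100) (zero_extend:SI (mem:QI (reg:SI 101))))

   this function would record reg_nonzero_bits[100] as 0xff and, since
   the upper 24 bits are then known to be zero and hence equal to the
   sign bit, reg_sign_bit_copies[100] as 24.  */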

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
	       rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  rtx note;

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
		      || INTVAL (XEXP (note, 0)) <= 0)
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
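
/* For reference, a typical call from try_combine looks something like

	 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src))
	   return 0;

   asking whether I2 may be merged into I3 given that I1 is also being
   merged; on success, I2DEST and I2SRC receive the single destination
   and source of I2.  */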

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
		  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
	 above.  We don't want to prevent pseudos from being combined
	 into the address of a MEM, so only prevent the combination if
	 i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
	   (GET_CODE (inner_dest) != MEM
	    || rtx_equal_p (i2dest, inner_dest)
	    || (i1dest && rtx_equal_p (i1dest, inner_dest)))
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
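
/* Some illustrative cases (the register numbers are made up):

	 (mult:SI (reg:SI 100) (const_int 8))   => 0, power of two;
	 (mult:SI (reg:SI 100) (const_int 6))   => 1;
	 (plus:SI (div:SI (reg:SI 100) (reg:SI 101)) (const_int 1))
						 => 1, found recursively.  */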

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, we may abort in reload.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)]
	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)]
	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
	*loc = XEXP (*loc, 1);
      else
	loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1475  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1476  int i1_feeds_i3 = 0;
1477  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1478  rtx new_i3_notes, new_i2_notes;
1479  /* Notes that we substituted I3 into I2 instead of the normal case.  */
1480  int i3_subst_into_i2 = 0;
1481  /* Notes that I1, I2 or I3 is a MULT operation.  */
1482  int have_mult = 0;
1483
1484  int maxreg;
1485  rtx temp;
1486  rtx link;
1487  int i;
1488
1489  /* Exit early if one of the insns involved can't be used for
1490     combinations.  */
1491  if (cant_combine_insn_p (i3)
1492      || cant_combine_insn_p (i2)
1493      || (i1 && cant_combine_insn_p (i1))
1494      /* We also can't do anything if I3 has a
1495	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1496	 libcall.  */
1497#if 0
1498      /* ??? This gives worse code, and appears to be unnecessary, since no
1499	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1500      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1501#endif
1502      )
1503    return 0;
1504
1505  combine_attempts++;
1506  undobuf.other_insn = 0;
1507
1508  /* Reset the hard register usage information.  */
1509  CLEAR_HARD_REG_SET (newpat_used_regs);
1510
1511  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1512     code below, set I1 to be the earlier of the two insns.  */
1513  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1514    temp = i1, i1 = i2, i2 = temp;
1515
1516  added_links_insn = 0;
1517
1518  /* First check for one important special case that the code below will
1519     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1520     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1521     we may be able to replace that destination with the destination of I3.
1522     This occurs in the common code where we compute both a quotient and
1523     remainder into a structure, in which case we want to do the computation
1524     directly into the structure to avoid register-register copies.
1525
1526     Note that this case handles both multiple sets in I2 and also
1527     cases where I2 has a number of CLOBBERs or PARALLELs.
1528
1529     We make very conservative checks below and only try to handle the
1530     most common cases of this.  For example, we only handle the case
1531     where I2 and I3 are adjacent to avoid making difficult register
1532     usage tests.  */
1533
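  /* Editor's illustration (not from the original sources; register numbers
     are invented).  Given a divmod-style I2 followed immediately by I3:

	I2: (parallel [(set (reg:SI 70) (div:SI (reg:SI 60) (reg:SI 61)))
		       (set (reg:SI 71) (mod:SI (reg:SI 60) (reg:SI 61)))])
	I3: (set (mem:SI (reg:SI 65)) (reg:SI 71))

     where (reg:SI 71) dies in I3, we replace the second SET_DEST in I2
     with I3's destination so that the remainder is stored directly:

	I2: (parallel [(set (reg:SI 70) (div:SI (reg:SI 60) (reg:SI 61)))
		       (set (mem:SI (reg:SI 65))
			    (mod:SI (reg:SI 60) (reg:SI 61)))])  */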
1534  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1535      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1536      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1537      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1538      && GET_CODE (PATTERN (i2)) == PARALLEL
1539      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1540      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1541	 below would need to check what is inside (and reg_overlap_mentioned_p
1542	 doesn't support those codes anyway).  Don't allow those destinations;
1543	 the resulting insn isn't likely to be recognized anyway.  */
1544      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1545      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1546      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1547				    SET_DEST (PATTERN (i3)))
1548      && next_real_insn (i2) == i3)
1549    {
1550      rtx p2 = PATTERN (i2);
1551
1552      /* Make sure that the destination of I3,
1553	 which we are going to substitute into one output of I2,
1554	 is not used within another output of I2.  We must avoid making this:
1555	 (parallel [(set (mem (reg 69)) ...)
1556		    (set (reg 69) ...)])
1557	 which is not well-defined as to order of actions.
1558	 (Besides, reload can't handle output reloads for this.)
1559
1560	 The problem can also happen if the dest of I3 is a memory ref,
1561	 if another dest in I2 is an indirect memory ref.  */
1562      for (i = 0; i < XVECLEN (p2, 0); i++)
1563	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1564	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1565	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1566					SET_DEST (XVECEXP (p2, 0, i))))
1567	  break;
1568
1569      if (i == XVECLEN (p2, 0))
1570	for (i = 0; i < XVECLEN (p2, 0); i++)
1571	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1572	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1573	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1574	    {
1575	      combine_merges++;
1576
1577	      subst_insn = i3;
1578	      subst_low_cuid = INSN_CUID (i2);
1579
1580	      added_sets_2 = added_sets_1 = 0;
1581	      i2dest = SET_SRC (PATTERN (i3));
1582
1583	      /* Replace the dest in I2 with our dest and make the resulting
1584		 insn the new pattern for I3.  Then skip to where we
1585		 validate the pattern.  Everything was set up above.  */
1586	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1587		     SET_DEST (PATTERN (i3)));
1588
1589	      newpat = p2;
1590	      i3_subst_into_i2 = 1;
1591	      goto validate_replacement;
1592	    }
1593    }
1594
1595  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1596     one of those words to another constant, merge them by making a new
1597     constant.  */
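  /* Editor's illustration (not from the original sources), assuming 32-bit
     words and a 64-bit DImode pseudo:

	I2: (set (reg:DI 70) (const_int 0))
	I3: (set (subreg:SI (reg:DI 70) 0) (const_int 5))

     Since the SUBREG is the low word, the two constants merge and the
     pair combines into a single load:

	(set (reg:DI 70) (const_int 5))  */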
1598  if (i1 == 0
1599      && (temp = single_set (i2)) != 0
1600      && (GET_CODE (SET_SRC (temp)) == CONST_INT
1601	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1602      && GET_CODE (SET_DEST (temp)) == REG
1603      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1604      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1605      && GET_CODE (PATTERN (i3)) == SET
1606      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1607      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1608      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1609      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1610      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1611    {
1612      HOST_WIDE_INT lo, hi;
1613
1614      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1615	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1616      else
1617	{
1618	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1619	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1620	}
1621
1622      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1623	{
1624	  /* We don't handle the case of the target word being wider
1625	     than a host wide int.  */
1626	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1627	    abort ();
1628
1629	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1630	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1631		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1632	}
1633      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1634	hi = INTVAL (SET_SRC (PATTERN (i3)));
1635      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1636	{
1637	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1638			     >> (HOST_BITS_PER_WIDE_INT - 1));
1639
1640	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1641		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1642	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1643		 (INTVAL (SET_SRC (PATTERN (i3)))));
1644	  if (hi == sign)
1645	    hi = lo < 0 ? -1 : 0;
1646	}
1647      else
1648	/* We don't handle the case of the higher word not fitting
1649	   entirely in either hi or lo.  */
1650	abort ();
1651
1652      combine_merges++;
1653      subst_insn = i3;
1654      subst_low_cuid = INSN_CUID (i2);
1655      added_sets_2 = added_sets_1 = 0;
1656      i2dest = SET_DEST (temp);
1657
1658      SUBST (SET_SRC (temp),
1659	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1660
1661      newpat = PATTERN (i2);
1662      goto validate_replacement;
1663    }
1664
1665#ifndef HAVE_cc0
1666  /* If we have no I1 and I2 looks like:
1667	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1668		   (set Y OP)])
1669     make up a dummy I1 that is
1670	(set Y OP)
1671     and change I2 to be
1672        (set (reg:CC X) (compare:CC Y (const_int 0)))
1673
1674     (We can ignore any trailing CLOBBERs.)
1675
1676     This undoes a previous combination and allows us to match a branch-and-
1677     decrement insn.  */
1678
1679  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1680      && XVECLEN (PATTERN (i2), 0) >= 2
1681      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1682      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1683	  == MODE_CC)
1684      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1685      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1686      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1687      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1688      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1689		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1690    {
1691      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1692	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1693	  break;
1694
1695      if (i == 1)
1696	{
1697	  /* We make I1 with the same INSN_UID as I2.  This gives it
1698	     the same INSN_CUID for value tracking.  Our fake I1 will
1699	     never appear in the insn stream so giving it the same INSN_UID
1700	     as I2 will not cause a problem.  */
1701
1702	  i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1703			     BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
1704			     XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1705			     NULL_RTX);
1706
1707	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1708	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1709		 SET_DEST (PATTERN (i1)));
1710	}
1711    }
1712#endif
1713
1714  /* Verify that I2 and I1 are valid for combining.  */
1715  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1716      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1717    {
1718      undo_all ();
1719      return 0;
1720    }
1721
1722  /* Record whether I2DEST is used in I2SRC and similarly for the other
1723     cases.  Knowing this will help in register status updating below.  */
1724  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1725  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1726  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1727
1728  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1729     in I2SRC.  */
1730  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1731
1732  /* Ensure that I3's pattern can be the destination of combines.  */
1733  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1734			  i1 && i2dest_in_i1src && i1_feeds_i3,
1735			  &i3dest_killed))
1736    {
1737      undo_all ();
1738      return 0;
1739    }
1740
1741  /* See if any of the insns is a MULT operation.  Unless one is, we will
1742     reject a combination whose result is a MULT, since it must be slower.
1743     Be conservative here.  */
1744  if (GET_CODE (i2src) == MULT
1745      || (i1 != 0 && GET_CODE (i1src) == MULT)
1746      || (GET_CODE (PATTERN (i3)) == SET
1747	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1748    have_mult = 1;
1749
1750  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1751     We used to do this EXCEPT in one case: I3 has a post-inc in an
1752     output operand.  However, that exception can give rise to insns like
1753	mov r3,(r3)+
1754     which is a famous insn on the PDP-11 where the value of r3 used as the
1755     source was model-dependent.  Avoid this sort of thing.  */
1756
1757#if 0
1758  if (!(GET_CODE (PATTERN (i3)) == SET
1759	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1760	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1761	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1762	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1763    /* It's not the exception.  */
1764#endif
1765#ifdef AUTO_INC_DEC
1766    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1767      if (REG_NOTE_KIND (link) == REG_INC
1768	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1769	      || (i1 != 0
1770		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1771	{
1772	  undo_all ();
1773	  return 0;
1774	}
1775#endif
1776
1777  /* See if the SETs in I1 or I2 need to be kept around in the merged
1778     instruction: whenever the value set there is still needed past I3.
1779     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1780
1781     For the SET in I1, we have two cases:  If I1 and I2 independently
1782     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1783     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1784     in I1 needs to be kept around unless I1DEST dies or is set in either
1785     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1786     I1DEST.  If so, we know I1 feeds into I2.  */
1787
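  /* Editor's illustration (not from the original sources): suppose I1 sets
     (reg 60), I2 sets (reg 61) using (reg 60), and I3 uses (reg 61).  Then
     I2SRC mentions I1DEST, so I1 feeds I2 rather than I3, and the SET from
     I1 must be kept unless (reg 60) is dead or set in I2 or I3.  If I2SRC
     did not mention (reg 60), I1 and I2 would feed I3 independently and
     only a death or set in I3 would matter.  */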
1788  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1789
1790  added_sets_1
1791    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1792	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1793
1794  /* If the set in I2 needs to be kept around, we must make a copy of
1795     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1796     PATTERN (I2), we are only substituting for the original I1DEST, not into
1797     an already-substituted copy.  This also prevents making self-referential
1798     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1799     I2DEST.  */
1800
1801  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1802	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1803	   : PATTERN (i2));
1804
1805  if (added_sets_2)
1806    i2pat = copy_rtx (i2pat);
1807
1808  combine_merges++;
1809
1810  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1811
1812  maxreg = max_reg_num ();
1813
1814  subst_insn = i3;
1815
1816  /* It is possible that the source of I2 or I1 may be performing an
1817     unneeded operation, such as a ZERO_EXTEND of something that is known
1818     to have the high part zero.  Handle that case by letting subst look at
1819     the innermost one of them.
1820
1821     Another way to do this would be to have a function that tries to
1822     simplify a single insn instead of merging two or more insns.  We don't
1823     do this because of the potential of infinite loops and because
1824     of the potential extra memory required.  However, doing it the way
1825     we are is a bit of a kludge and doesn't catch all cases.
1826
1827     But only do this if -fexpensive-optimizations since it slows things down
1828     and doesn't usually win.  */
1829
1830  if (flag_expensive_optimizations)
1831    {
1832      /* Pass pc_rtx so no substitutions are done, just simplifications.
1833	 The cases that we are interested in here do not involve the few
1834	 cases where is_replaced is checked.  */
1835      if (i1)
1836	{
1837	  subst_low_cuid = INSN_CUID (i1);
1838	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1839	}
1840      else
1841	{
1842	  subst_low_cuid = INSN_CUID (i2);
1843	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1844	}
1845    }
1846
1847#ifndef HAVE_cc0
1848  /* Many machines that don't use CC0 have insns that can both perform an
1849     arithmetic operation and set the condition code.  These operations will
1850     be represented as a PARALLEL with the first element of the vector
1851     being a COMPARE of an arithmetic operation with the constant zero.
1852     The second element of the vector will set some pseudo to the result
1853     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1854     match such a pattern and so will generate an extra insn.   Here we test
1855     for this case, where both the comparison and the operation result are
1856     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1857     I2SRC.  Later we will make the PARALLEL that contains I2.  */
1858
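  /* Editor's illustration (not from the original sources; the CC mode and
     register numbers are invented).  The situation handled here is:

	I2: (set (reg:SI 62) (plus:SI (reg:SI 60) (reg:SI 61)))
	I3: (set (reg:CCZ 17) (compare:CCZ (reg:SI 62) (const_int 0)))

     with (reg:SI 62) still needed after I3, so ADDED_SETS_2 is nonzero.
     We replace (reg:SI 62) inside the COMPARE with the PLUS from I2, and
     later build the PARALLEL that does the addition and sets the
     condition code in a single insn.  */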
1859  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1860      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1861      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1862      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1863    {
1864#ifdef SELECT_CC_MODE
1865      rtx *cc_use;
1866      enum machine_mode compare_mode;
1867#endif
1868
1869      newpat = PATTERN (i3);
1870      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1871
1872      i2_is_used = 1;
1873
1874#ifdef SELECT_CC_MODE
1875      /* See if a COMPARE with the operand we substituted in should be done
1876	 with the mode that is currently being used.  If not, do the same
1877	 processing we do in `subst' for a SET; namely, if the destination
1878	 is used only once, try to replace it with a register of the proper
1879	 mode and also replace the COMPARE.  */
1880      if (undobuf.other_insn == 0
1881	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1882					&undobuf.other_insn))
1883	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1884					      i2src, const0_rtx))
1885	      != GET_MODE (SET_DEST (newpat))))
1886	{
1887	  unsigned int regno = REGNO (SET_DEST (newpat));
1888	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1889
1890	  if (regno < FIRST_PSEUDO_REGISTER
1891	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1892		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1893	    {
1894	      if (regno >= FIRST_PSEUDO_REGISTER)
1895		SUBST (regno_reg_rtx[regno], new_dest);
1896
1897	      SUBST (SET_DEST (newpat), new_dest);
1898	      SUBST (XEXP (*cc_use, 0), new_dest);
1899	      SUBST (SET_SRC (newpat),
1900		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1901	    }
1902	  else
1903	    undobuf.other_insn = 0;
1904	}
1905#endif
1906    }
1907  else
1908#endif
1909    {
1910      n_occurrences = 0;		/* `subst' counts here */
1911
1912      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1913	 need to make a unique copy of I2SRC each time we substitute it
1914	 to avoid self-referential rtl.  */
1915
1916      subst_low_cuid = INSN_CUID (i2);
1917      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1918		      ! i1_feeds_i3 && i1dest_in_i1src);
1919      substed_i2 = 1;
1920
1921      /* Record whether i2's body now appears within i3's body.  */
1922      i2_is_used = n_occurrences;
1923    }
1924
1925  /* If we already got a failure, don't try to do more.  Otherwise,
1926     try to substitute in I1 if we have it.  */
1927
1928  if (i1 && GET_CODE (newpat) != CLOBBER)
1929    {
1930      /* Before we can do this substitution, we must redo the test done
1931	 above (see detailed comments there) that ensures that I1DEST
1932	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1933
1934      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1935			      0, (rtx*) 0))
1936	{
1937	  undo_all ();
1938	  return 0;
1939	}
1940
1941      n_occurrences = 0;
1942      subst_low_cuid = INSN_CUID (i1);
1943      newpat = subst (newpat, i1dest, i1src, 0, 0);
1944      substed_i1 = 1;
1945    }
1946
1947  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
1948     to count all the ways that I2SRC and I1SRC can be used.  */
1949  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1950       && i2_is_used + added_sets_2 > 1)
1951      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1952	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1953	      > 1))
1954      /* Fail if we tried to make a new register (we used to abort, but there's
1955	 really no reason to).  */
1956      || max_reg_num () != maxreg
1957      /* Fail if we couldn't do something and have a CLOBBER.  */
1958      || GET_CODE (newpat) == CLOBBER
1959      /* Fail if this new pattern is a MULT and we didn't have one before
1960	 at the outer level.  */
1961      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1962	  && ! have_mult))
1963    {
1964      undo_all ();
1965      return 0;
1966    }
1967
1968  /* If the actions of the earlier insns must be kept
1969     in addition to substituting them into the latest one,
1970     we must make a new PARALLEL for the latest insn
1971     to hold the additional SETs.  */
1972
1973  if (added_sets_1 || added_sets_2)
1974    {
1975      combine_extras++;
1976
1977      if (GET_CODE (newpat) == PARALLEL)
1978	{
1979	  rtvec old = XVEC (newpat, 0);
1980	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1981	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1982	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
1983		  sizeof (old->elem[0]) * old->num_elem);
1984	}
1985      else
1986	{
1987	  rtx old = newpat;
1988	  total_sets = 1 + added_sets_1 + added_sets_2;
1989	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1990	  XVECEXP (newpat, 0, 0) = old;
1991	}
1992
1993      if (added_sets_1)
1994	XVECEXP (newpat, 0, --total_sets)
1995	  = (GET_CODE (PATTERN (i1)) == PARALLEL
1996	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1997
1998      if (added_sets_2)
1999	{
2000	  /* If there is no I1, use I2's body as is.  We used to also not do
2001	     the subst call below if I2 was substituted into I3,
2002	     but that could lose a simplification.  */
2003	  if (i1 == 0)
2004	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2005	  else
2006	    /* See comment where i2pat is assigned.  */
2007	    XVECEXP (newpat, 0, --total_sets)
2008	      = subst (i2pat, i1dest, i1src, 0, 0);
2009	}
2010    }
2011
2012  /* We come here when we are replacing a destination in I2 with the
2013     destination of I3.  */
2014 validate_replacement:
2015
2016  /* Note which hard regs this insn has as inputs.  */
2017  mark_used_regs_combine (newpat);
2018
2019  /* Is the result of the combination a valid instruction?  */
2020  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2021
2022  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2023     the second SET's destination is a register that is unused and isn't
2024     marked as an instruction that might trap in an EH region.  In that case,
2025     we just need the first SET.   This can occur when simplifying a divmod
2026     insn.  We *must* test for this case here because the code below that
2027     splits two independent SETs doesn't handle this case correctly when it
2028     updates the register status.  Also check the case where the first
2029     SET's destination is unused.  That would not cause incorrect code, but
2030     does cause an unneeded insn to remain.  */
2031
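  /* Editor's illustration (not from the original sources).  After
     substitution NEWPAT might be

	(parallel [(set (reg:SI 70) (div:SI (reg:SI 60) (reg:SI 61)))
		   (set (reg:SI 71) (mod:SI (reg:SI 60) (reg:SI 61)))])

     with (reg:SI 71) marked REG_UNUSED in I3; keeping only the first SET
     leaves a plain division that the target may be able to recognize.  */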
2032  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2033      && XVECLEN (newpat, 0) == 2
2034      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2035      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2036      && asm_noperands (newpat) < 0)
2037    {
2038      rtx set0 = XVECEXP (newpat, 0, 0);
2039      rtx set1 = XVECEXP (newpat, 0, 1);
2040      rtx note;
2041
2042      if (((GET_CODE (SET_DEST (set1)) == REG
2043	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
2044	   || (GET_CODE (SET_DEST (set1)) == SUBREG
2045	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
2046	  && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2047	      || INTVAL (XEXP (note, 0)) <= 0)
2048	  && ! side_effects_p (SET_SRC (set1)))
2049	{
2050	  newpat = set0;
2051	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2052	}
2053
2054      else if (((GET_CODE (SET_DEST (set0)) == REG
2055		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
2056		|| (GET_CODE (SET_DEST (set0)) == SUBREG
2057		    && find_reg_note (i3, REG_UNUSED,
2058				      SUBREG_REG (SET_DEST (set0)))))
2059	       && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
2060		   || INTVAL (XEXP (note, 0)) <= 0)
2061	       && ! side_effects_p (SET_SRC (set0)))
2062	{
2063	  newpat = set1;
2064	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2065
2066	  if (insn_code_number >= 0)
2067	    {
2068	      /* If we will be able to accept this, we have made a
2069		 change to the destination of I3.  This requires us to
2070		 do a few adjustments.  */
2071
2072	      PATTERN (i3) = newpat;
2073	      adjust_for_new_dest (i3);
2074	    }
2075	}
2076    }
2077
2078  /* If we were combining three insns and the result is a simple SET
2079     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2080     insns.  There are two ways to do this.  It can be split using a
2081     machine-specific method (like when you have an addition of a large
2082     constant) or by combine in the function find_split_point.  */
2083
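  /* Editor's illustration (not from the original sources).  On a target
     whose add instruction accepts only small immediates, the
     unrecognizable

	(set (reg:SI 70) (plus:SI (reg:SI 60) (const_int 4100)))

     might be split by the machine description into

	(set (reg:SI 70) (plus:SI (reg:SI 60) (const_int 4096)))
	(set (reg:SI 70) (plus:SI (reg:SI 70) (const_int 4)))

     which is the kind of two-insn result handled below.  */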
2084  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2085      && asm_noperands (newpat) < 0)
2086    {
2087      rtx m_split, *split;
2088      rtx ni2dest = i2dest;
2089
2090      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2091	 use I2DEST as a scratch register will help.  In the latter case,
2092	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
2093
2094      m_split = split_insns (newpat, i3);
2095
2096      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2097	 inputs of NEWPAT.  */
2098
2099      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2100	 possible to try that as a scratch reg.  This would require adding
2101	 more code to make it work though.  */
2102
2103      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2104	{
2105	  /* If I2DEST is a hard register or the only use of a pseudo,
2106	     we can change its mode.  */
2107	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2108	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
2109	      && GET_CODE (i2dest) == REG
2110	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2111		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2112		      && ! REG_USERVAR_P (i2dest))))
2113	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2114				   REGNO (i2dest));
2115
2116	  m_split = split_insns (gen_rtx_PARALLEL
2117				 (VOIDmode,
2118				  gen_rtvec (2, newpat,
2119					     gen_rtx_CLOBBER (VOIDmode,
2120							      ni2dest))),
2121				 i3);
2122	  /* If the split with the mode-changed register didn't work, try
2123	     the original register.  */
2124	  if (! m_split && ni2dest != i2dest)
2125	    {
2126	      ni2dest = i2dest;
2127	      m_split = split_insns (gen_rtx_PARALLEL
2128				     (VOIDmode,
2129				      gen_rtvec (2, newpat,
2130						 gen_rtx_CLOBBER (VOIDmode,
2131								  i2dest))),
2132				     i3);
2133	    }
2134	}
2135
2136      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2137	{
2138	  m_split = PATTERN (m_split);
2139	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2140	  if (insn_code_number >= 0)
2141	    newpat = m_split;
2142	}
2143      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2144	       && (next_real_insn (i2) == i3
2145		   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2146	{
2147	  rtx i2set, i3set;
2148	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2149	  newi2pat = PATTERN (m_split);
2150
2151	  i3set = single_set (NEXT_INSN (m_split));
2152	  i2set = single_set (m_split);
2153
2154	  /* In case we changed the mode of I2DEST, replace it in the
2155	     pseudo-register table here.  We can't do it above in case this
2156	     code doesn't get executed and we do a split the other way.  */
2157
2158	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2159	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2160
2161	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2162
2163	  /* If I2 or I3 has multiple SETs, we won't know how to track
2164	     register status, so don't use these insns.  If I2's destination
2165	     is used between I2 and I3, we also can't use these insns.  */
2166
2167	  if (i2_code_number >= 0 && i2set && i3set
2168	      && (next_real_insn (i2) == i3
2169		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2170	    insn_code_number = recog_for_combine (&newi3pat, i3,
2171						  &new_i3_notes);
2172	  if (insn_code_number >= 0)
2173	    newpat = newi3pat;
2174
2175	  /* It is possible that both insns now set the destination of I3.
2176	     If so, we must show an extra use of it.  */
2177
2178	  if (insn_code_number >= 0)
2179	    {
2180	      rtx new_i3_dest = SET_DEST (i3set);
2181	      rtx new_i2_dest = SET_DEST (i2set);
2182
2183	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2184		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2185		     || GET_CODE (new_i3_dest) == SUBREG)
2186		new_i3_dest = XEXP (new_i3_dest, 0);
2187
2188	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2189		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2190		     || GET_CODE (new_i2_dest) == SUBREG)
2191		new_i2_dest = XEXP (new_i2_dest, 0);
2192
2193	      if (GET_CODE (new_i3_dest) == REG
2194		  && GET_CODE (new_i2_dest) == REG
2195		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2196		REG_N_SETS (REGNO (new_i2_dest))++;
2197	    }
2198	}
2199
2200      /* If we can split it and use I2DEST, go ahead and see if that
2201	 helps things be recognized.  Verify that none of the registers
2202	 are set between I2 and I3.  */
2203      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2204#ifdef HAVE_cc0
2205	  && GET_CODE (i2dest) == REG
2206#endif
2207	  /* We need I2DEST in the proper mode.  If it is a hard register
2208	     or the only use of a pseudo, we can change its mode.  */
2209	  && (GET_MODE (*split) == GET_MODE (i2dest)
2210	      || GET_MODE (*split) == VOIDmode
2211	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2212	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2213		  && ! REG_USERVAR_P (i2dest)))
2214	  && (next_real_insn (i2) == i3
2215	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2216	  /* We can't overwrite I2DEST if its value is still used by
2217	     NEWPAT.  */
2218	  && ! reg_referenced_p (i2dest, newpat))
2219	{
2220	  rtx newdest = i2dest;
2221	  enum rtx_code split_code = GET_CODE (*split);
2222	  enum machine_mode split_mode = GET_MODE (*split);
2223
2224	  /* Get NEWDEST as a register in the proper mode.  We have already
2225	     validated that we can do this.  */
2226	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2227	    {
2228	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2229
2230	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2231		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2232	    }
2233
2234	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2235	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2236	     appeared to be a memory address.  This is a kludge.  */
2237	  if (split_code == MULT
2238	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2239	      && INTVAL (XEXP (*split, 1)) > 0
2240	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2241	    {
2242	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2243					     XEXP (*split, 0), GEN_INT (i)));
2244	      /* Update split_code because we may not have a multiply
2245		 anymore.  */
2246	      split_code = GET_CODE (*split);
2247	    }
2248
2249#ifdef INSN_SCHEDULING
2250	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2251	     be written as a ZERO_EXTEND.  */
2252	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2253	    {
2254#ifdef LOAD_EXTEND_OP
2255	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2256		 what it really is.  */
2257	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2258		  == SIGN_EXTEND)
2259		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2260						    SUBREG_REG (*split)));
2261	      else
2262#endif
2263		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2264						    SUBREG_REG (*split)));
2265	    }
2266#endif
2267
2268	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2269	  SUBST (*split, newdest);
2270	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2271
2272	  /* If the split point was a MULT and we didn't have one before,
2273	     don't use one now.  */
2274	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2275	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2276	}
2277    }
2278
2279  /* Check for a case where we loaded from memory in a narrow mode and
2280     then sign extended it, but we need both registers.  In that case,
2281     we have a PARALLEL with both loads from the same memory location.
2282     We can split this into a load from memory followed by a register-register
2283     copy.  This saves at least one insn, more if register allocation can
2284     eliminate the copy.
2285
2286     We cannot do this if the destination of the first assignment is a
2287     condition code register or cc0.  We eliminate this case by making sure
2288     the SET_DEST and SET_SRC have the same mode.
2289
2290     We cannot do this if the destination of the second assignment is
2291     a register that we have already assumed is zero-extended.  Similarly
2292     for a SUBREG of such a register.  */
2293
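  /* Editor's illustration (not from the original sources; the SUBREG
     offset shown assumes a little-endian target).  A NEWPAT of the shape

	(parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 65))))
		   (set (reg:HI 71) (mem:HI (reg:SI 65)))])

     is rewritten so that the extending load becomes NEWI2PAT and the
     second SET copies the low part of the loaded register instead of
     reading memory again:

	I2: (set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 65))))
	I3: (set (reg:HI 71) (subreg:HI (reg:SI 70) 0))  */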
2294  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2295	   && GET_CODE (newpat) == PARALLEL
2296	   && XVECLEN (newpat, 0) == 2
2297	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2298	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2299	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2300	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2301	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2302	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2303			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2304	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2305				   INSN_CUID (i2))
2306	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2307	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2308	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2309		 (GET_CODE (temp) == REG
2310		  && reg_nonzero_bits[REGNO (temp)] != 0
2311		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2312		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2313		  && (reg_nonzero_bits[REGNO (temp)]
2314		      != GET_MODE_MASK (word_mode))))
2315	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2316		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2317		     (GET_CODE (temp) == REG
2318		      && reg_nonzero_bits[REGNO (temp)] != 0
2319		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2320		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2321		      && (reg_nonzero_bits[REGNO (temp)]
2322			  != GET_MODE_MASK (word_mode)))))
2323	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2324					 SET_SRC (XVECEXP (newpat, 0, 1)))
2325	   && ! find_reg_note (i3, REG_UNUSED,
2326			       SET_DEST (XVECEXP (newpat, 0, 0))))
2327    {
2328      rtx ni2dest;
2329
2330      newi2pat = XVECEXP (newpat, 0, 0);
2331      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2332      newpat = XVECEXP (newpat, 0, 1);
2333      SUBST (SET_SRC (newpat),
2334	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2335      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2336
2337      if (i2_code_number >= 0)
2338	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2339
2340      if (insn_code_number >= 0)
2341	{
2342	  rtx insn;
2343	  rtx link;
2344
2345	  /* If we will be able to accept this, we have made a change to the
2346	     destination of I3.  This requires us to do a few adjustments.  */
2347	  PATTERN (i3) = newpat;
2348	  adjust_for_new_dest (i3);
2349
2350	  /* I3 now uses what used to be its destination and which is
2351	     now I2's destination.  That means we need a LOG_LINK from
2352	     I3 to I2.  But we used to have one, so we still will.
2353
2354	     However, some later insn might be using I2's dest and have
2355	     a LOG_LINK pointing at I3.  We must remove this link.
2356	     The simplest way to remove the link is to point it at I1,
2357	     which we know will be a NOTE.  */
2358
2359	  for (insn = NEXT_INSN (i3);
2360	       insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2361			|| insn != BB_HEAD (this_basic_block->next_bb));
2362	       insn = NEXT_INSN (insn))
2363	    {
2364	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2365		{
2366		  for (link = LOG_LINKS (insn); link;
2367		       link = XEXP (link, 1))
2368		    if (XEXP (link, 0) == i3)
2369		      XEXP (link, 0) = i1;
2370
2371		  break;
2372		}
2373	    }
2374	}
2375    }
2376
2377  /* Similarly, check for a case where we have a PARALLEL of two independent
2378     SETs but we started with three insns.  In this case, we can do the sets
2379     as two separate insns.  This case occurs when some SET allows two
2380     other insns to combine, but the destination of that SET is still live.  */
2381
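  /* Editor's illustration (not from the original sources).  A NEWPAT such
     as

	(parallel [(set (reg:SI 70) (plus:SI (reg:SI 60) (const_int 4)))
		   (set (reg:SI 71) (xor:SI (reg:SI 61) (reg:SI 62)))])

     where neither destination is referenced by the other SET, is simply
     emitted as two separate insns, NEWI2PAT and NEWPAT.  */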
2382  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2383	   && GET_CODE (newpat) == PARALLEL
2384	   && XVECLEN (newpat, 0) == 2
2385	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2386	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2387	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2388	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2389	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2390	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2391	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2392				   INSN_CUID (i2))
2393	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2394	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2395	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2396	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2397				  XVECEXP (newpat, 0, 0))
2398	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2399				  XVECEXP (newpat, 0, 1))
2400	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2401		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2402    {
2403      /* Normally, it doesn't matter which of the two is done first,
2404	 but it does if one references cc0.  In that case, it has to
2405	 be first.  */
2406#ifdef HAVE_cc0
2407      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2408	{
2409	  newi2pat = XVECEXP (newpat, 0, 0);
2410	  newpat = XVECEXP (newpat, 0, 1);
2411	}
2412      else
2413#endif
2414	{
2415	  newi2pat = XVECEXP (newpat, 0, 1);
2416	  newpat = XVECEXP (newpat, 0, 0);
2417	}
2418
2419      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2420
2421      if (i2_code_number >= 0)
2422	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2423    }
2424
2425  /* If it still isn't recognized, fail and change things back the way they
2426     were.  */
2427  if ((insn_code_number < 0
2428       /* Is the result a reasonable ASM_OPERANDS?  */
2429       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2430    {
2431      undo_all ();
2432      return 0;
2433    }
2434
2435  /* If we had to change another insn, make sure it is valid also.  */
2436  if (undobuf.other_insn)
2437    {
2438      rtx other_pat = PATTERN (undobuf.other_insn);
2439      rtx new_other_notes;
2440      rtx note, next;
2441
2442      CLEAR_HARD_REG_SET (newpat_used_regs);
2443
2444      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2445					     &new_other_notes);
2446
2447      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2448	{
2449	  undo_all ();
2450	  return 0;
2451	}
2452
2453      PATTERN (undobuf.other_insn) = other_pat;
2454
2455      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2456	 are still valid.  Then add any non-duplicate notes added by
2457	 recog_for_combine.  */
2458      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2459	{
2460	  next = XEXP (note, 1);
2461
2462	  if (REG_NOTE_KIND (note) == REG_UNUSED
2463	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2464	    {
2465	      if (GET_CODE (XEXP (note, 0)) == REG)
2466		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2467
2468	      remove_note (undobuf.other_insn, note);
2469	    }
2470	}
2471
2472      for (note = new_other_notes; note; note = XEXP (note, 1))
2473	if (GET_CODE (XEXP (note, 0)) == REG)
2474	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2475
2476      distribute_notes (new_other_notes, undobuf.other_insn,
2477			undobuf.other_insn, NULL_RTX);
2478    }
2479#ifdef HAVE_cc0
2480  /* If I2 is the CC0 setter and I3 is the CC0 user, check whether they
2481     are adjacent to each other.  */
2482  {
2483    rtx p = prev_nonnote_insn (i3);
2484    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2485	&& sets_cc0_p (newi2pat))
2486      {
2487	undo_all ();
2488	return 0;
2489      }
2490  }
2491#endif
2492
2493  /* We now know that we can do this combination.  Merge the insns and
2494     update the status of registers and LOG_LINKS.  */
2495
2496  {
2497    rtx i3notes, i2notes, i1notes = 0;
2498    rtx i3links, i2links, i1links = 0;
2499    rtx midnotes = 0;
2500    unsigned int regno;
2501
2502    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2503       clear them.  */
2504    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2505    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2506    if (i1)
2507      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2508
2509    /* Ensure that we do not have something that should not be shared but
2510       occurs multiple times in the new insns.  Check this by first
2511       resetting all the `used' flags and then copying anything that is shared.  */
2512
2513    reset_used_flags (i3notes);
2514    reset_used_flags (i2notes);
2515    reset_used_flags (i1notes);
2516    reset_used_flags (newpat);
2517    reset_used_flags (newi2pat);
2518    if (undobuf.other_insn)
2519      reset_used_flags (PATTERN (undobuf.other_insn));
2520
2521    i3notes = copy_rtx_if_shared (i3notes);
2522    i2notes = copy_rtx_if_shared (i2notes);
2523    i1notes = copy_rtx_if_shared (i1notes);
2524    newpat = copy_rtx_if_shared (newpat);
2525    newi2pat = copy_rtx_if_shared (newi2pat);
2526    if (undobuf.other_insn)
2527      PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2528
2529    INSN_CODE (i3) = insn_code_number;
2530    PATTERN (i3) = newpat;
2531
2532    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2533      {
2534	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2535
2536	reset_used_flags (call_usage);
2537	call_usage = copy_rtx (call_usage);
2538
2539	if (substed_i2)
2540	  replace_rtx (call_usage, i2dest, i2src);
2541
2542	if (substed_i1)
2543	  replace_rtx (call_usage, i1dest, i1src);
2544
2545	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2546      }
2547
2548    if (undobuf.other_insn)
2549      INSN_CODE (undobuf.other_insn) = other_code_number;
2550
2551    /* We had one special case above where I2 had more than one set and
2552       we replaced a destination of one of those sets with the destination
2553       of I3.  In that case, we have to update LOG_LINKS of insns later
2554       in this basic block.  Note that this (expensive) case is rare.
2555
2556       Also, in this case, we must pretend that all REG_NOTEs for I2
2557       actually came from I3, so that REG_UNUSED notes from I2 will be
2558       properly handled.  */
2559
2560    if (i3_subst_into_i2)
2561      {
2562	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2563	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2564	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2565	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2566	      && ! find_reg_note (i2, REG_UNUSED,
2567				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2568	    for (temp = NEXT_INSN (i2);
2569		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2570			  || BB_HEAD (this_basic_block) != temp);
2571		 temp = NEXT_INSN (temp))
2572	      if (temp != i3 && INSN_P (temp))
2573		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2574		  if (XEXP (link, 0) == i2)
2575		    XEXP (link, 0) = i3;
2576
2577	if (i3notes)
2578	  {
2579	    rtx link = i3notes;
2580	    while (XEXP (link, 1))
2581	      link = XEXP (link, 1);
2582	    XEXP (link, 1) = i2notes;
2583	  }
2584	else
2585	  i3notes = i2notes;
2586	i2notes = 0;
2587      }
2588
2589    LOG_LINKS (i3) = 0;
2590    REG_NOTES (i3) = 0;
2591    LOG_LINKS (i2) = 0;
2592    REG_NOTES (i2) = 0;
2593
2594    if (newi2pat)
2595      {
2596	INSN_CODE (i2) = i2_code_number;
2597	PATTERN (i2) = newi2pat;
2598      }
2599    else
2600      {
2601	PUT_CODE (i2, NOTE);
2602	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2603	NOTE_SOURCE_FILE (i2) = 0;
2604      }
2605
2606    if (i1)
2607      {
2608	LOG_LINKS (i1) = 0;
2609	REG_NOTES (i1) = 0;
2610	PUT_CODE (i1, NOTE);
2611	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2612	NOTE_SOURCE_FILE (i1) = 0;
2613      }
2614
2615    /* Get death notes for everything that is now used in either I3 or
2616       I2 and used to die in a previous insn.  If we built two new
2617       patterns, move from I1 to I2 then I2 to I3 so that we get the
2618       proper movement on registers that I2 modifies.  */
2619
2620    if (newi2pat)
2621      {
2622	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2623	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2624      }
2625    else
2626      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2627		   i3, &midnotes);
2628
2629    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2630    if (i3notes)
2631      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX);
2632    if (i2notes)
2633      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX);
2634    if (i1notes)
2635      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX);
2636    if (midnotes)
2637      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2638
2639    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2640       know these are REG_UNUSED and want them to go to the desired insn,
2641       so we always pass it as i3.  We have not counted the notes in
2642       reg_n_deaths yet, so we need to do so now.  */
2643
2644    if (newi2pat && new_i2_notes)
2645      {
2646	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2647	  if (GET_CODE (XEXP (temp, 0)) == REG)
2648	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2649
2650	distribute_notes (new_i2_notes, i2, i2, NULL_RTX);
2651      }
2652
2653    if (new_i3_notes)
2654      {
2655	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2656	  if (GET_CODE (XEXP (temp, 0)) == REG)
2657	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2658
2659	distribute_notes (new_i3_notes, i3, i3, NULL_RTX);
2660      }
2661
2662    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2663       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2664       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2665       in that case, it might delete I2.  Similarly for I2 and I1.
2666       Show an additional death due to the REG_DEAD note we make here.  If
2667       we discard it in distribute_notes, we will decrement it again.  */
2668
2669    if (i3dest_killed)
2670      {
2671	if (GET_CODE (i3dest_killed) == REG)
2672	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2673
2674	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2675	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2676					       NULL_RTX),
2677			    NULL_RTX, i2, NULL_RTX);
2678	else
2679	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2680					       NULL_RTX),
2681			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2682      }
2683
2684    if (i2dest_in_i2src)
2685      {
2686	if (GET_CODE (i2dest) == REG)
2687	  REG_N_DEATHS (REGNO (i2dest))++;
2688
2689	if (newi2pat && reg_set_p (i2dest, newi2pat))
2690	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2691			    NULL_RTX, i2, NULL_RTX);
2692	else
2693	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2694			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2695      }
2696
2697    if (i1dest_in_i1src)
2698      {
2699	if (GET_CODE (i1dest) == REG)
2700	  REG_N_DEATHS (REGNO (i1dest))++;
2701
2702	if (newi2pat && reg_set_p (i1dest, newi2pat))
2703	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2704			    NULL_RTX, i2, NULL_RTX);
2705	else
2706	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2707			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2708      }
2709
2710    distribute_links (i3links);
2711    distribute_links (i2links);
2712    distribute_links (i1links);
2713
2714    if (GET_CODE (i2dest) == REG)
2715      {
2716	rtx link;
2717	rtx i2_insn = 0, i2_val = 0, set;
2718
2719	/* The insn that used to set this register doesn't exist, and
2720	   this life of the register may not exist either.  See if one of
2721	   I3's links points to an insn that sets I2DEST.  If it does,
2722	   that is now the last known value for I2DEST. If we don't update
2723	   this and I2 set the register to a value that depended on its old
2724	   contents, we will get confused.  If this insn is used, things
2725	   will be set correctly in combine_instructions.  */
2726
2727	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2728	  if ((set = single_set (XEXP (link, 0))) != 0
2729	      && rtx_equal_p (i2dest, SET_DEST (set)))
2730	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2731
2732	record_value_for_reg (i2dest, i2_insn, i2_val);
2733
2734	/* If the reg formerly set in I2 died only once and that was in I3,
2735	   zero its use count so it won't make `reload' do any work.  */
2736	if (! added_sets_2
2737	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2738	    && ! i2dest_in_i2src)
2739	  {
2740	    regno = REGNO (i2dest);
2741	    REG_N_SETS (regno)--;
2742	  }
2743      }
2744
2745    if (i1 && GET_CODE (i1dest) == REG)
2746      {
2747	rtx link;
2748	rtx i1_insn = 0, i1_val = 0, set;
2749
2750	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2751	  if ((set = single_set (XEXP (link, 0))) != 0
2752	      && rtx_equal_p (i1dest, SET_DEST (set)))
2753	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2754
2755	record_value_for_reg (i1dest, i1_insn, i1_val);
2756
2757	regno = REGNO (i1dest);
2758	if (! added_sets_1 && ! i1dest_in_i1src)
2759	  REG_N_SETS (regno)--;
2760      }
2761
2762    /* Update reg_nonzero_bits et al for any changes that may have been made
2763       to this insn.  The order of the calls below is important, because
2764       newi2pat can affect the nonzero_bits of newpat.  */
2765    if (newi2pat)
2766      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2767    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2768
2769    /* Set new_direct_jump_p if a new return or simple jump instruction
2770       has been created.
2771
2772       If I3 is now an unconditional jump, ensure that it has a
2773       BARRIER following it since it may have initially been a
2774       conditional jump.  It may also be the last nonnote insn.  */
2775
2776    if (returnjump_p (i3) || any_uncondjump_p (i3))
2777      {
2778	*new_direct_jump_p = 1;
2779	mark_jump_label (PATTERN (i3), i3, 0);
2780
2781	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2782	    || GET_CODE (temp) != BARRIER)
2783	  emit_barrier_after (i3);
2784      }
2785
2786    if (undobuf.other_insn != NULL_RTX
2787	&& (returnjump_p (undobuf.other_insn)
2788	    || any_uncondjump_p (undobuf.other_insn)))
2789      {
2790	*new_direct_jump_p = 1;
2791
2792	if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
2793	    || GET_CODE (temp) != BARRIER)
2794	  emit_barrier_after (undobuf.other_insn);
2795      }
2796
2797    /* A no-op jump does not need a barrier, but it does need the CFG
2798       cleaned up.  */
2799    if (GET_CODE (newpat) == SET
2800	&& SET_SRC (newpat) == pc_rtx
2801	&& SET_DEST (newpat) == pc_rtx)
2802      *new_direct_jump_p = 1;
2803  }
2804
2805  combine_successes++;
2806  undo_commit ();
2807
2808  if (added_links_insn
2809      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2810      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2811    return added_links_insn;
2812  else
2813    return newi2pat ? i2 : i3;
2814}
2815
2816/* Undo all the modifications recorded in undobuf.  */
2817
2818static void
2819undo_all (void)
2820{
2821  struct undo *undo, *next;
2822
2823  for (undo = undobuf.undos; undo; undo = next)
2824    {
2825      next = undo->next;
2826      if (undo->is_int)
2827	*undo->where.i = undo->old_contents.i;
2828      else
2829	*undo->where.r = undo->old_contents.r;
2830
2831      undo->next = undobuf.frees;
2832      undobuf.frees = undo;
2833    }
2834
2835  undobuf.undos = 0;
2836}
2837
2838/* We've committed to accepting the changes we made.  Move all
2839   of the undos to the free list.  */
2840
2841static void
2842undo_commit (void)
2843{
2844  struct undo *undo, *next;
2845
2846  for (undo = undobuf.undos; undo; undo = next)
2847    {
2848      next = undo->next;
2849      undo->next = undobuf.frees;
2850      undobuf.frees = undo;
2851    }
2852  undobuf.undos = 0;
2853}
2854
2855
2856/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2857   where we have an arithmetic expression and return that point.  LOC will
2858   be inside INSN.
2859
2860   try_combine will call this function to see if an insn can be split into
2861   two insns.  */
2862
2863static rtx *
2864find_split_point (rtx *loc, rtx insn)
2865{
2866  rtx x = *loc;
2867  enum rtx_code code = GET_CODE (x);
2868  rtx *split;
2869  unsigned HOST_WIDE_INT len = 0;
2870  HOST_WIDE_INT pos = 0;
2871  int unsignedp = 0;
2872  rtx inner = NULL_RTX;
2873
2874  /* First special-case some codes.  */
2875  switch (code)
2876    {
2877    case SUBREG:
2878#ifdef INSN_SCHEDULING
2879      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2880	 point.  */
2881      if (GET_CODE (SUBREG_REG (x)) == MEM)
2882	return loc;
2883#endif
2884      return find_split_point (&SUBREG_REG (x), insn);
2885
2886    case MEM:
2887#ifdef HAVE_lo_sum
2888      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2889	 using LO_SUM and HIGH.  */
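      /* Editor's illustration (not from the original sources):

	     (mem:SI (symbol_ref:SI ("x")))

	 becomes

	     (mem:SI (lo_sum:SI (high:SI (symbol_ref:SI ("x")))
				(symbol_ref:SI ("x"))))

	 and the split point returned is the HIGH subexpression.  */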
2890      if (GET_CODE (XEXP (x, 0)) == CONST
2891	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2892	{
2893	  SUBST (XEXP (x, 0),
2894		 gen_rtx_LO_SUM (Pmode,
2895				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2896				 XEXP (x, 0)));
2897	  return &XEXP (XEXP (x, 0), 0);
2898	}
2899#endif
2900
2901      /* If we have a PLUS whose second operand is a constant and the
2902	 address is not valid, perhaps we can split it up using
2903	 the machine-specific way to split large constants.  We use
2904	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2905	 it will not remain in the result.  */
2906      if (GET_CODE (XEXP (x, 0)) == PLUS
2907	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2908	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2909	{
2910	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2911	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2912				 subst_insn);
2913
2914	  /* This should have produced two insns, each of which sets our
2915	     placeholder.  If the source of the second is a valid address,
2916	     we can put both sources together and make a split point
2917	     in the middle.  */
2918
2919	  if (seq
2920	      && NEXT_INSN (seq) != NULL_RTX
2921	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
2922	      && GET_CODE (seq) == INSN
2923	      && GET_CODE (PATTERN (seq)) == SET
2924	      && SET_DEST (PATTERN (seq)) == reg
2925	      && ! reg_mentioned_p (reg,
2926				    SET_SRC (PATTERN (seq)))
2927	      && GET_CODE (NEXT_INSN (seq)) == INSN
2928	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
2929	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
2930	      && memory_address_p (GET_MODE (x),
2931				   SET_SRC (PATTERN (NEXT_INSN (seq)))))
2932	    {
2933	      rtx src1 = SET_SRC (PATTERN (seq));
2934	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
2935
2936	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2937		 find where in SRC2 it was placed, that can become our
2938		 split point and we can replace this address with SRC2.
2939		 Just try two obvious places.  */
2940
2941	      src2 = replace_rtx (src2, reg, src1);
2942	      split = 0;
2943	      if (XEXP (src2, 0) == src1)
2944		split = &XEXP (src2, 0);
2945	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2946		       && XEXP (XEXP (src2, 0), 0) == src1)
2947		split = &XEXP (XEXP (src2, 0), 0);
2948
2949	      if (split)
2950		{
2951		  SUBST (XEXP (x, 0), src2);
2952		  return split;
2953		}
2954	    }
2955
2956	  /* If that didn't work, perhaps the first operand is complex and
2957	     needs to be computed separately, so make a split point there.
2958	     This will occur on machines that just support REG + CONST
2959	     and have a constant moved through some previous computation.  */
2960
2961	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2962		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2963			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2964			     == 'o')))
2965	    return &XEXP (XEXP (x, 0), 0);
2966	}
2967      break;
2968
2969    case SET:
2970#ifdef HAVE_cc0
2971      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2972	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2973	 we need to put the operand into a register.  So split at that
2974	 point.  */
2975
2976      if (SET_DEST (x) == cc0_rtx
2977	  && GET_CODE (SET_SRC (x)) != COMPARE
2978	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2979	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2980	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
2981		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2982	return &SET_SRC (x);
2983#endif
2984
2985      /* See if we can split SET_SRC as it stands.  */
2986      split = find_split_point (&SET_SRC (x), insn);
2987      if (split && split != &SET_SRC (x))
2988	return split;
2989
2990      /* See if we can split SET_DEST as it stands.  */
2991      split = find_split_point (&SET_DEST (x), insn);
2992      if (split && split != &SET_DEST (x))
2993	return split;
2994
2995      /* See if this is a bitfield assignment with everything constant.  If
2996	 so, this is an IOR of an AND, so split it into that.  */
2997      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2998	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2999	      <= HOST_BITS_PER_WIDE_INT)
3000	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3001	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3002	  && GET_CODE (SET_SRC (x)) == CONST_INT
3003	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3004	       + INTVAL (XEXP (SET_DEST (x), 2)))
3005	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3006	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3007	{
3008	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3009	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3010	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3011	  rtx dest = XEXP (SET_DEST (x), 0);
3012	  enum machine_mode mode = GET_MODE (dest);
3013	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3014
3015	  if (BITS_BIG_ENDIAN)
3016	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3017
3018	  if (src == mask)
3019	    SUBST (SET_SRC (x),
3020		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3021	  else
3022	    SUBST (SET_SRC (x),
3023		   gen_binary (IOR, mode,
3024			       gen_binary (AND, mode, dest,
3025					   gen_int_mode (~(mask << pos),
3026							 mode)),
3027			       GEN_INT (src << pos)));
3028
3029	  SUBST (SET_DEST (x), dest);
3030
3031	  split = find_split_point (&SET_SRC (x), insn);
3032	  if (split && split != &SET_SRC (x))
3033	    return split;
3034	}
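
      /* For example (with !BITS_BIG_ENDIAN), storing the constant 5 into
	 a 3-bit field at bit 4 of register R,
	     (set (zero_extract R (const_int 3) (const_int 4)) (const_int 5)),
	 has LEN = 3, POS = 4 and MASK = 7, and is rewritten as
	     (set R (ior (and R (const_int -113)) (const_int 80)))
	 since ~(7 << 4) = -113 and 5 << 4 = 80.  When SRC == MASK (the
	 field is set to all ones), the AND is omitted.  */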
3035
3036      /* Otherwise, see if this is an operation that we can split into two.
3037	 If so, try to split that.  */
3038      code = GET_CODE (SET_SRC (x));
3039
3040      switch (code)
3041	{
3042	case AND:
	  /* If we are AND'ing with a large constant that has only a single
	     bit set and the result is only being used in a context where
	     we need to know if it is zero or nonzero, replace it with a
	     bit extraction.  This avoids the large constant, which might
	     have taken more than one insn to build.  If the constant would
	     not have been a valid AND operand but took only one insn to
	     build, this is no worse; if it took more than one insn, it is
	     better.  */
3051
3052	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3053	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3054	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3055	      && GET_CODE (SET_DEST (x)) == REG
3056	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3057	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3058	      && XEXP (*split, 0) == SET_DEST (x)
3059	      && XEXP (*split, 1) == const0_rtx)
3060	    {
3061	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3062						XEXP (SET_SRC (x), 0),
3063						pos, NULL_RTX, 1, 1, 0, 0);
3064	      if (extraction != 0)
3065		{
3066		  SUBST (SET_SRC (x), extraction);
3067		  return find_split_point (loc, insn);
3068		}
3069	    }
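
	  /* For instance, (set R (and X (const_int 4096))) where R is
	     used only in (ne R (const_int 0)) has POS = 12, so the AND
	     is replaced by a one-bit unsigned extraction of bit 12,
	     avoiding the constant 4096.  */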
3070	  break;
3071
3072	case NE:
	  /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one
	     bit of X can be nonzero, it can be converted into a NEG of a
	     shift.  */
3075	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3076	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3077	      && 1 <= (pos = exact_log2
3078		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3079				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3080	    {
3081	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3082
3083	      SUBST (SET_SRC (x),
3084		     gen_rtx_NEG (mode,
3085				  gen_rtx_LSHIFTRT (mode,
3086						    XEXP (SET_SRC (x), 0),
3087						    GEN_INT (pos))));
3088
3089	      split = find_split_point (&SET_SRC (x), insn);
3090	      if (split && split != &SET_SRC (x))
3091		return split;
3092	    }
3093	  break;
3094
3095	case SIGN_EXTEND:
3096	  inner = XEXP (SET_SRC (x), 0);
3097
3098	  /* We can't optimize if either mode is a partial integer
3099	     mode as we don't know how many bits are significant
3100	     in those modes.  */
3101	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3102	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3103	    break;
3104
3105	  pos = 0;
3106	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3107	  unsignedp = 0;
3108	  break;
3109
3110	case SIGN_EXTRACT:
3111	case ZERO_EXTRACT:
3112	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3113	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3114	    {
3115	      inner = XEXP (SET_SRC (x), 0);
3116	      len = INTVAL (XEXP (SET_SRC (x), 1));
3117	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3118
3119	      if (BITS_BIG_ENDIAN)
3120		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3121	      unsignedp = (code == ZERO_EXTRACT);
3122	    }
3123	  break;
3124
3125	default:
3126	  break;
3127	}
3128
3129      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3130	{
3131	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3132
3133	  /* For unsigned, we have a choice of a shift followed by an
3134	     AND or two shifts.  Use two shifts for field sizes where the
3135	     constant might be too large.  We assume here that we can
3136	     always at least get 8-bit constants in an AND insn, which is
3137	     true for every current RISC.  */
3138
3139	  if (unsignedp && len <= 8)
3140	    {
3141	      SUBST (SET_SRC (x),
3142		     gen_rtx_AND (mode,
3143				  gen_rtx_LSHIFTRT
3144				  (mode, gen_lowpart_for_combine (mode, inner),
3145				   GEN_INT (pos)),
3146				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3147
3148	      split = find_split_point (&SET_SRC (x), insn);
3149	      if (split && split != &SET_SRC (x))
3150		return split;
3151	    }
3152	  else
3153	    {
3154	      SUBST (SET_SRC (x),
3155		     gen_rtx_fmt_ee
3156		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3157		      gen_rtx_ASHIFT (mode,
3158				      gen_lowpart_for_combine (mode, inner),
3159				      GEN_INT (GET_MODE_BITSIZE (mode)
3160					       - len - pos)),
3161		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3162
3163	      split = find_split_point (&SET_SRC (x), insn);
3164	      if (split && split != &SET_SRC (x))
3165		return split;
3166	    }
3167	}
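
      /* For example (with !BITS_BIG_ENDIAN), in SImode a signed 9-bit
	 field at bit 3 is rewritten as two shifts,
	     (ashiftrt (ashift X (const_int 20)) (const_int 23)),
	 since 32 - 9 - 3 = 20 and 32 - 9 = 23, while an unsigned 8-bit
	 field at bit 4 uses the shift-and-mask form,
	     (and (lshiftrt X (const_int 4)) (const_int 255)).  */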
3168
3169      /* See if this is a simple operation with a constant as the second
3170	 operand.  It might be that this constant is out of range and hence
3171	 could be used as a split point.  */
3172      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3173	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3174	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3175	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3176	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3177	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3178		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3179		      == 'o'))))
3180	return &XEXP (SET_SRC (x), 1);
3181
3182      /* Finally, see if this is a simple operation with its first operand
3183	 not in a register.  The operation might require this operand in a
3184	 register, so return it as a split point.  We can always do this
3185	 because if the first operand were another operation, we would have
3186	 already found it as a split point.  */
3187      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3188	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3189	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3190	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3191	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3192	return &XEXP (SET_SRC (x), 0);
3193
3194      return 0;
3195
3196    case AND:
3197    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly, NAND written as (ior (not A) (not B)) is rewritten as
	 (not (and A B)).  */
3201      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3202	{
3203	  SUBST (*loc,
3204		 gen_rtx_NOT (GET_MODE (x),
3205			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3206					      GET_MODE (x),
3207					      XEXP (XEXP (x, 0), 0),
3208					      XEXP (XEXP (x, 1), 0))));
3209	  return find_split_point (loc, insn);
3210	}
3211
3212      /* Many RISC machines have a large set of logical insns.  If the
3213	 second operand is a NOT, put it first so we will try to split the
3214	 other operand first.  */
3215      if (GET_CODE (XEXP (x, 1)) == NOT)
3216	{
3217	  rtx tem = XEXP (x, 0);
3218	  SUBST (XEXP (x, 0), XEXP (x, 1));
3219	  SUBST (XEXP (x, 1), tem);
3220	}
3221      break;
3222
3223    default:
3224      break;
3225    }
3226
3227  /* Otherwise, select our actions depending on our rtx class.  */
3228  switch (GET_RTX_CLASS (code))
3229    {
3230    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3231    case '3':
3232      split = find_split_point (&XEXP (x, 2), insn);
3233      if (split)
3234	return split;
3235      /* ... fall through ...  */
3236    case '2':
3237    case 'c':
3238    case '<':
3239      split = find_split_point (&XEXP (x, 1), insn);
3240      if (split)
3241	return split;
3242      /* ... fall through ...  */
3243    case '1':
3244      /* Some machines have (and (shift ...) ...) insns.  If X is not
3245	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3246      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3247	return &XEXP (x, 0);
3248
3249      split = find_split_point (&XEXP (x, 0), insn);
3250      if (split)
3251	return split;
3252      return loc;
3253    }
3254
3255  /* Otherwise, we don't have a split point.  */
3256  return 0;
3257}
3258
3259/* Throughout X, replace FROM with TO, and return the result.
3260   The result is TO if X is FROM;
3261   otherwise the result is X, but its contents may have been modified.
3262   If they were modified, a record was made in undobuf so that
3263   undo_all will (among other things) return X to its original state.
3264
   If the number of changes necessary is too great to record for undoing,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing it
   the caller can tell whether the result is valid.
3270
3271   `n_occurrences' is incremented each time FROM is replaced.
3272
3273   IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3274
3275   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
3276   by copying if `n_occurrences' is nonzero.  */
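
/* For example, when combining (set R1 (plus R2 (const_int 4))) into a
   later insn that uses R1 twice, subst is called with FROM = R1 and
   TO = (plus R2 (const_int 4)); with UNIQUE_COPY nonzero the second
   replacement is made on a copy (via copy_rtx) so the two occurrences
   do not share rtl structure.  */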
3277
3278static rtx
3279subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
3280{
3281  enum rtx_code code = GET_CODE (x);
3282  enum machine_mode op0_mode = VOIDmode;
3283  const char *fmt;
3284  int len, i;
3285  rtx new;
3286
3287/* Two expressions are equal if they are identical copies of a shared
3288   RTX or if they are both registers with the same register number
3289   and mode.  */
3290
3291#define COMBINE_RTX_EQUAL_P(X,Y)			\
3292  ((X) == (Y)						\
3293   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3294       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3295
3296  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3297    {
3298      n_occurrences++;
3299      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3300    }
3301
3302  /* If X and FROM are the same register but different modes, they will
3303     not have been seen as equal above.  However, flow.c will make a
3304     LOG_LINKS entry for that case.  If we do nothing, we will try to
3305     rerecognize our original insn and, when it succeeds, we will
3306     delete the feeding insn, which is incorrect.
3307
3308     So force this insn not to match in this (rare) case.  */
3309  if (! in_dest && code == REG && GET_CODE (from) == REG
3310      && REGNO (x) == REGNO (from))
3311    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3312
3313  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3314     of which may contain things that can be combined.  */
3315  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3316    return x;
3317
3318  /* It is possible to have a subexpression appear twice in the insn.
3319     Suppose that FROM is a register that appears within TO.
3320     Then, after that subexpression has been scanned once by `subst',
3321     the second time it is scanned, TO may be found.  If we were
3322     to scan TO here, we would find FROM within it and create a
     self-referential rtl structure, which is completely wrong.  */
3324  if (COMBINE_RTX_EQUAL_P (x, to))
3325    return to;
3326
3327  /* Parallel asm_operands need special attention because all of the
3328     inputs are shared across the arms.  Furthermore, unsharing the
3329     rtl results in recognition failures.  Failure to handle this case
3330     specially can result in circular rtl.
3331
3332     Solve this by doing a normal pass across the first entry of the
3333     parallel, and only processing the SET_DESTs of the subsequent
3334     entries.  Ug.  */
3335
3336  if (code == PARALLEL
3337      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3338      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3339    {
3340      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3341
3342      /* If this substitution failed, this whole thing fails.  */
3343      if (GET_CODE (new) == CLOBBER
3344	  && XEXP (new, 0) == const0_rtx)
3345	return new;
3346
3347      SUBST (XVECEXP (x, 0, 0), new);
3348
3349      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3350	{
3351	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3352
3353	  if (GET_CODE (dest) != REG
3354	      && GET_CODE (dest) != CC0
3355	      && GET_CODE (dest) != PC)
3356	    {
3357	      new = subst (dest, from, to, 0, unique_copy);
3358
3359	      /* If this substitution failed, this whole thing fails.  */
3360	      if (GET_CODE (new) == CLOBBER
3361		  && XEXP (new, 0) == const0_rtx)
3362		return new;
3363
3364	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3365	    }
3366	}
3367    }
3368  else
3369    {
3370      len = GET_RTX_LENGTH (code);
3371      fmt = GET_RTX_FORMAT (code);
3372
3373      /* We don't need to process a SET_DEST that is a register, CC0,
3374	 or PC, so set up to skip this common case.  All other cases
3375	 where we want to suppress replacing something inside a
3376	 SET_SRC are handled via the IN_DEST operand.  */
3377      if (code == SET
3378	  && (GET_CODE (SET_DEST (x)) == REG
3379	      || GET_CODE (SET_DEST (x)) == CC0
3380	      || GET_CODE (SET_DEST (x)) == PC))
3381	fmt = "ie";
3382
3383      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3384	 constant.  */
3385      if (fmt[0] == 'e')
3386	op0_mode = GET_MODE (XEXP (x, 0));
3387
3388      for (i = 0; i < len; i++)
3389	{
3390	  if (fmt[i] == 'E')
3391	    {
3392	      int j;
3393	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3394		{
3395		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3396		    {
3397		      new = (unique_copy && n_occurrences
3398			     ? copy_rtx (to) : to);
3399		      n_occurrences++;
3400		    }
3401		  else
3402		    {
3403		      new = subst (XVECEXP (x, i, j), from, to, 0,
3404				   unique_copy);
3405
3406		      /* If this substitution failed, this whole thing
3407			 fails.  */
3408		      if (GET_CODE (new) == CLOBBER
3409			  && XEXP (new, 0) == const0_rtx)
3410			return new;
3411		    }
3412
3413		  SUBST (XVECEXP (x, i, j), new);
3414		}
3415	    }
3416	  else if (fmt[i] == 'e')
3417	    {
3418	      /* If this is a register being set, ignore it.  */
3419	      new = XEXP (x, i);
3420	      if (in_dest
3421		  && i == 0
3422		  && (((code == SUBREG || code == ZERO_EXTRACT)
3423		       && GET_CODE (new) == REG)
3424		      || code == STRICT_LOW_PART))
3425		;
3426
3427	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3428		{
3429		  /* In general, don't install a subreg involving two
3430		     modes not tieable.  It can worsen register
3431		     allocation, and can even make invalid reload
3432		     insns, since the reg inside may need to be copied
3433		     from in the outside mode, and that may be invalid
3434		     if it is an fp reg copied in integer mode.
3435
3436		     We allow two exceptions to this: It is valid if
3437		     it is inside another SUBREG and the mode of that
3438		     SUBREG and the mode of the inside of TO is
3439		     tieable and it is valid if X is a SET that copies
3440		     FROM to CC0.  */
3441
3442		  if (GET_CODE (to) == SUBREG
3443		      && ! MODES_TIEABLE_P (GET_MODE (to),
3444					    GET_MODE (SUBREG_REG (to)))
3445		      && ! (code == SUBREG
3446			    && MODES_TIEABLE_P (GET_MODE (x),
3447						GET_MODE (SUBREG_REG (to))))
3448#ifdef HAVE_cc0
3449		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3450#endif
3451		      )
3452		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3453
3454#ifdef CANNOT_CHANGE_MODE_CLASS
3455		  if (code == SUBREG
3456		      && GET_CODE (to) == REG
3457		      && REGNO (to) < FIRST_PSEUDO_REGISTER
3458		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3459						   GET_MODE (to),
3460						   GET_MODE (x)))
3461		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3462#endif
3463
3464		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3465		  n_occurrences++;
3466		}
3467	      else
3468		/* If we are in a SET_DEST, suppress most cases unless we
3469		   have gone inside a MEM, in which case we want to
3470		   simplify the address.  We assume here that things that
3471		   are actually part of the destination have their inner
3472		   parts in the first expression.  This is true for SUBREG,
3473		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3474		   things aside from REG and MEM that should appear in a
3475		   SET_DEST.  */
3476		new = subst (XEXP (x, i), from, to,
3477			     (((in_dest
3478				&& (code == SUBREG || code == STRICT_LOW_PART
3479				    || code == ZERO_EXTRACT))
3480			       || code == SET)
3481			      && i == 0), unique_copy);
3482
3483	      /* If we found that we will have to reject this combination,
3484		 indicate that by returning the CLOBBER ourselves, rather than
3485		 an expression containing it.  This will speed things up as
3486		 well as prevent accidents where two CLOBBERs are considered
3487		 to be equal, thus producing an incorrect simplification.  */
3488
3489	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3490		return new;
3491
3492	      if (GET_CODE (x) == SUBREG
3493		  && (GET_CODE (new) == CONST_INT
3494		      || GET_CODE (new) == CONST_DOUBLE))
3495		{
3496		  enum machine_mode mode = GET_MODE (x);
3497
3498		  x = simplify_subreg (GET_MODE (x), new,
3499				       GET_MODE (SUBREG_REG (x)),
3500				       SUBREG_BYTE (x));
3501		  if (! x)
3502		    x = gen_rtx_CLOBBER (mode, const0_rtx);
3503		}
3504	      else if (GET_CODE (new) == CONST_INT
3505		       && GET_CODE (x) == ZERO_EXTEND)
3506		{
3507		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3508						new, GET_MODE (XEXP (x, 0)));
3509		  if (! x)
3510		    abort ();
3511		}
3512	      else
3513		SUBST (XEXP (x, i), new);
3514	    }
3515	}
3516    }
3517
3518  /* Try to simplify X.  If the simplification changed the code, it is likely
3519     that further simplification will help, so loop, but limit the number
3520     of repetitions that will be performed.  */
3521
3522  for (i = 0; i < 4; i++)
3523    {
3524      /* If X is sufficiently simple, don't bother trying to do anything
3525	 with it.  */
3526      if (code != CONST_INT && code != REG && code != CLOBBER)
3527	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3528
3529      if (GET_CODE (x) == code)
3530	break;
3531
3532      code = GET_CODE (x);
3533
3534      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
3536      op0_mode = VOIDmode;
3537    }
3538
3539  return x;
3540}
3541
3542/* Simplify X, a piece of RTL.  We just operate on the expression at the
3543   outer level; call `subst' to simplify recursively.  Return the new
3544   expression.
3545
   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
   will be the last iteration, even if an expression with a code different
   from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3549
3550static rtx
3551combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int last,
3552		      int in_dest)
3553{
3554  enum rtx_code code = GET_CODE (x);
3555  enum machine_mode mode = GET_MODE (x);
3556  rtx temp;
3557  rtx reversed;
3558  int i;
3559
3560  /* If this is a commutative operation, put a constant last and a complex
3561     expression first.  We don't need to do this for comparisons here.  */
3562  if (GET_RTX_CLASS (code) == 'c'
3563      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3564    {
3565      temp = XEXP (x, 0);
3566      SUBST (XEXP (x, 0), XEXP (x, 1));
3567      SUBST (XEXP (x, 1), temp);
3568    }
3569
  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the
     sign extension and the addition.  Note that this is not the same as
     the original code, but overflow is undefined for signed values.  Also
     note that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
         (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend A) C2) C3),
     where C3 is C1 * C2.

     We do this to simplify address expressions.  */
3587
3588  if ((code == PLUS || code == MINUS || code == MULT)
3589      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3590      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3591      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3592      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3593      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3594      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3595      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3596      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3597					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3598					    XEXP (XEXP (x, 0), 1))) != 0)
3599    {
3600      rtx new
3601	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3602				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3603				INTVAL (XEXP (XEXP (x, 0), 1)));
3604
3605      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3606				  INTVAL (XEXP (XEXP (x, 0), 1)));
3607
3608      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3609    }
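
  /* Concretely, sign-extending a byte within SImode has C4 = 24; if C5
     is C1 << 24 with C1 a signed 8-bit value, TEMP = (ashiftrt C5 C4)
     is just C1, and the first operand becomes
         (plus (ashiftrt (ashift A 24) 24) C1),
     i.e. the constant has been pulled back outside the sign
     extension.  */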
3610
3611  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3612     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3613     things.  Check for cases where both arms are testing the same
3614     condition.
3615
3616     Don't do anything if all operands are very simple.  */
3617
3618  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3619	|| GET_RTX_CLASS (code) == '<')
3620       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3621	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3622		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3623		      == 'o')))
3624	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3625	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3626		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3627			 == 'o')))))
3628      || (GET_RTX_CLASS (code) == '1'
3629	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3630	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3631		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3632			 == 'o'))))))
3633    {
3634      rtx cond, true_rtx, false_rtx;
3635
3636      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3637      if (cond != 0
3638	  /* If everything is a comparison, what we have is highly unlikely
3639	     to be simpler, so don't use it.  */
3640	  && ! (GET_RTX_CLASS (code) == '<'
3641		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3642		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3643	{
3644	  rtx cop1 = const0_rtx;
3645	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3646
3647	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3648	    return x;
3649
3650	  /* Simplify the alternative arms; this may collapse the true and
3651	     false arms to store-flag values.  Be careful to use copy_rtx
3652	     here since true_rtx or false_rtx might share RTL with x as a
3653	     result of the if_then_else_cond call above.  */
3654	  true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
3655	  false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
3656
3657	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
3658	     is unlikely to be simpler.  */
3659	  if (general_operand (true_rtx, VOIDmode)
3660	      && general_operand (false_rtx, VOIDmode))
3661	    {
3662	      enum rtx_code reversed;
3663
3664	      /* Restarting if we generate a store-flag expression will cause
3665		 us to loop.  Just drop through in this case.  */
3666
3667	      /* If the result values are STORE_FLAG_VALUE and zero, we can
3668		 just make the comparison operation.  */
3669	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3670		x = gen_binary (cond_code, mode, cond, cop1);
3671	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3672		       && ((reversed = reversed_comparison_code_parts
3673					(cond_code, cond, cop1, NULL))
3674		           != UNKNOWN))
3675		x = gen_binary (reversed, mode, cond, cop1);
3676
3677	      /* Likewise, we can make the negate of a comparison operation
3678		 if the result values are - STORE_FLAG_VALUE and zero.  */
3679	      else if (GET_CODE (true_rtx) == CONST_INT
3680		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3681		       && false_rtx == const0_rtx)
3682		x = simplify_gen_unary (NEG, mode,
3683					gen_binary (cond_code, mode, cond,
3684						    cop1),
3685					mode);
3686	      else if (GET_CODE (false_rtx) == CONST_INT
3687		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3688		       && true_rtx == const0_rtx
3689		       && ((reversed = reversed_comparison_code_parts
3690					(cond_code, cond, cop1, NULL))
3691		           != UNKNOWN))
3692		x = simplify_gen_unary (NEG, mode,
3693					gen_binary (reversed, mode,
3694						    cond, cop1),
3695					mode);
3696	      else
3697		return gen_rtx_IF_THEN_ELSE (mode,
3698					     gen_binary (cond_code, VOIDmode,
3699							 cond, cop1),
3700					     true_rtx, false_rtx);
3701
3702	      code = GET_CODE (x);
3703	      op0_mode = VOIDmode;
3704	    }
3705	}
3706    }
3707
3708  /* Try to fold this expression in case we have constants that weren't
3709     present before.  */
3710  temp = 0;
3711  switch (GET_RTX_CLASS (code))
3712    {
3713    case '1':
3714      if (op0_mode == VOIDmode)
3715	op0_mode = GET_MODE (XEXP (x, 0));
3716      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3717      break;
3718    case '<':
3719      if (! VECTOR_MODE_P (mode))
3720	{
3721	  enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3722	  if (cmp_mode == VOIDmode)
3723	    {
3724	      cmp_mode = GET_MODE (XEXP (x, 1));
3725	      if (cmp_mode == VOIDmode)
3726		cmp_mode = op0_mode;
3727	    }
3728	  temp = simplify_relational_operation (code, cmp_mode,
3729						XEXP (x, 0), XEXP (x, 1));
3730#ifdef FLOAT_STORE_FLAG_VALUE
3731	  if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3732	    {
3733	      if (temp == const0_rtx)
3734		temp = CONST0_RTX (mode);
3735	      else
3736		temp = CONST_DOUBLE_FROM_REAL_VALUE
3737			 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3738	    }
3739#endif
3740	}
3741      break;
3742    case 'c':
3743    case '2':
3744      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3745      break;
3746    case 'b':
3747    case '3':
3748      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3749					 XEXP (x, 1), XEXP (x, 2));
3750      break;
3751    }
3752
3753  if (temp)
3754    {
3755      x = temp;
3756      code = GET_CODE (temp);
3757      op0_mode = VOIDmode;
3758      mode = GET_MODE (temp);
3759    }
3760
3761  /* First see if we can apply the inverse distributive law.  */
3762  if (code == PLUS || code == MINUS
3763      || code == AND || code == IOR || code == XOR)
3764    {
3765      x = apply_distributive_law (x);
3766      code = GET_CODE (x);
3767      op0_mode = VOIDmode;
3768    }
3769
3770  /* If CODE is an associative operation not otherwise handled, see if we
3771     can associate some operands.  This can win if they are constants or
3772     if they are logically related (i.e. (a & b) & a).  */
3773  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3774       || code == AND || code == IOR || code == XOR
3775       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3776      && ((INTEGRAL_MODE_P (mode) && code != DIV)
3777	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3778    {
3779      if (GET_CODE (XEXP (x, 0)) == code)
3780	{
3781	  rtx other = XEXP (XEXP (x, 0), 0);
3782	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3783	  rtx inner_op1 = XEXP (x, 1);
3784	  rtx inner;
3785
	  /* If this is a commutative operation, make sure any constant
	     operand is passed as the second one.  */
3788	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3789	    {
3790	      rtx tem = inner_op0;
3791	      inner_op0 = inner_op1;
3792	      inner_op1 = tem;
3793	    }
3794	  inner = simplify_binary_operation (code == MINUS ? PLUS
3795					     : code == DIV ? MULT
3796					     : code,
3797					     mode, inner_op0, inner_op1);
3798
3799	  /* For commutative operations, try the other pair if that one
3800	     didn't simplify.  */
3801	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3802	    {
3803	      other = XEXP (XEXP (x, 0), 1);
3804	      inner = simplify_binary_operation (code, mode,
3805						 XEXP (XEXP (x, 0), 0),
3806						 XEXP (x, 1));
3807	    }
3808
3809	  if (inner)
3810	    return gen_binary (code, mode, other, inner);
3811	}
3812    }
3813
3814  /* A little bit of algebraic simplification here.  */
3815  switch (code)
3816    {
3817    case MEM:
3818      /* Ensure that our address has any ASHIFTs converted to MULT in case
3819	 address-recognizing predicates are called later.  */
3820      temp = make_compound_operation (XEXP (x, 0), MEM);
3821      SUBST (XEXP (x, 0), temp);
3822      break;
3823
3824    case SUBREG:
3825      if (op0_mode == VOIDmode)
3826	op0_mode = GET_MODE (SUBREG_REG (x));
3827
3828      /* simplify_subreg can't use gen_lowpart_for_combine.  */
3829      if (CONSTANT_P (SUBREG_REG (x))
3830	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
3831	     /* Don't call gen_lowpart_for_combine if the inner mode
3832		is VOIDmode and we cannot simplify it, as SUBREG without
3833		inner mode is invalid.  */
3834	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
3835	      || gen_lowpart_common (mode, SUBREG_REG (x))))
3836	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3837
3838      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3839        break;
3840      {
3841	rtx temp;
3842	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3843				SUBREG_BYTE (x));
3844	if (temp)
3845	  return temp;
3846      }
3847
3848      /* Don't change the mode of the MEM if that would change the meaning
3849	 of the address.  */
3850      if (GET_CODE (SUBREG_REG (x)) == MEM
3851	  && (MEM_VOLATILE_P (SUBREG_REG (x))
3852	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3853	return gen_rtx_CLOBBER (mode, const0_rtx);
3854
3855      /* Note that we cannot do any narrowing for non-constants since
3856	 we might have been counting on using the fact that some bits were
3857	 zero.  We now do this in the SET.  */
3858
3859      break;
3860
3861    case NOT:
3862      if (GET_CODE (XEXP (x, 0)) == SUBREG
3863	  && subreg_lowpart_p (XEXP (x, 0))
3864	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3865	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3866	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3867	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3868	{
3869	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3870
3871	  x = gen_rtx_ROTATE (inner_mode,
3872			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
3873						  inner_mode),
3874			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3875	  return gen_lowpart_for_combine (mode, x);
3876	}
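
      /* This uses the identity ~(1 << N) == rotate (~1, N): the
	 all-ones-except-bit-0 pattern rotated left by N clears exactly
	 bit N.  */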
3877
      /* Apply De Morgan's laws to reduce the number of patterns needed
	 for machines with negating logical insns (and-not, nand, etc.).
	 If the result has only one NOT, put it first, since that is how
	 the patterns are coded.  */
3882
3883      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3884	{
3885	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3886	  enum machine_mode op_mode;
3887
3888	  op_mode = GET_MODE (in1);
3889	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
3890
3891	  op_mode = GET_MODE (in2);
3892	  if (op_mode == VOIDmode)
3893	    op_mode = mode;
3894	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
3895
3896	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3897	    {
3898	      rtx tem = in2;
3899	      in2 = in1; in1 = tem;
3900	    }
3901
3902	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3903				 mode, in1, in2);
3904	}
3905      break;
3906
3907    case NEG:
3908      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
3909      if (GET_CODE (XEXP (x, 0)) == XOR
3910	  && XEXP (XEXP (x, 0), 1) == const1_rtx
3911	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3912	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3913
3914      temp = expand_compound_operation (XEXP (x, 0));
3915
3916      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3917	 replaced by (lshiftrt X C).  This will convert
3918	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
3919
3920      if (GET_CODE (temp) == ASHIFTRT
3921	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
3922	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3923	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3924				     INTVAL (XEXP (temp, 1)));
3925
3926      /* If X has only a single bit that might be nonzero, say, bit I, convert
3927	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3928	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
3929	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
3930	 or a SUBREG of one since we'd be making the expression more
3931	 complex if it was just a register.  */
3932
3933      if (GET_CODE (temp) != REG
3934	  && ! (GET_CODE (temp) == SUBREG
3935		&& GET_CODE (SUBREG_REG (temp)) == REG)
3936	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3937	{
3938	  rtx temp1 = simplify_shift_const
3939	    (NULL_RTX, ASHIFTRT, mode,
3940	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3941				   GET_MODE_BITSIZE (mode) - 1 - i),
3942	     GET_MODE_BITSIZE (mode) - 1 - i);
3943
3944	  /* If all we did was surround TEMP with the two shifts, we
3945	     haven't improved anything, so don't use it.  Otherwise,
3946	     we are better off with TEMP1.  */
3947	  if (GET_CODE (temp1) != ASHIFTRT
3948	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3949	      || XEXP (XEXP (temp1, 0), 0) != temp)
3950	    return temp1;
3951	}
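
      /* E.g. in SImode, if only bit 0 of TEMP can be nonzero (I == 0),
	 (neg TEMP) becomes (ashiftrt (ashift TEMP 31) 31), which
	 replicates bit 0 across the word: 0 stays 0 and 1 becomes -1.  */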
3952      break;
3953
3954    case TRUNCATE:
3955      /* We can't handle truncation to a partial integer mode here
3956	 because we don't know the real bitsize of the partial
3957	 integer mode.  */
3958      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3959	break;
3960
3961      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3962	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3963				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3964	SUBST (XEXP (x, 0),
3965	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3966			      GET_MODE_MASK (mode), NULL_RTX, 0));
3967
3968      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
3969      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3970	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3971	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3972	return XEXP (XEXP (x, 0), 0);
3973
3974      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3975	 (OP:SI foo:SI) if OP is NEG or ABS.  */
3976      if ((GET_CODE (XEXP (x, 0)) == ABS
3977	   || GET_CODE (XEXP (x, 0)) == NEG)
3978	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3979	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3980	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3981	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
3982				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
3983
      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
	 (truncate:SI X).  */
3986      if (GET_CODE (XEXP (x, 0)) == SUBREG
3987	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3988	  && subreg_lowpart_p (XEXP (x, 0)))
3989	return SUBREG_REG (XEXP (x, 0));
3990
3991      /* If we know that the value is already truncated, we can
3992         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
3993         is nonzero for the corresponding modes.  But don't do this
3994         for an (LSHIFTRT (MULT ...)) since this will cause problems
3995         with the umulXi3_highpart patterns.  */
3996      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3997				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3998	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3999	     >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4000	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4001		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4002	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4003
4004      /* A truncate of a comparison can be replaced with a subreg if
4005         STORE_FLAG_VALUE permits.  This is like the previous test,
4006         but it works even if the comparison is done in a mode larger
4007         than HOST_BITS_PER_WIDE_INT.  */
4008      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4009	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4010	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4011	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4012
4013      /* Similarly, a truncate of a register whose value is a
4014         comparison can be replaced with a subreg if STORE_FLAG_VALUE
4015         permits.  */
4016      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4017	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4018	  && (temp = get_last_value (XEXP (x, 0)))
4019	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4020	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4021
4022      break;
4023
4024    case FLOAT_TRUNCATE:
4025      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
4026      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4027	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4028	return XEXP (XEXP (x, 0), 0);
4029
      /* (float_truncate:SF (float_truncate:DF foo:XF))
         = (float_truncate:SF foo:XF).
	 This may eliminate double rounding, so it is unsafe and done
	 only when flag_unsafe_math_optimizations is set.

         (float_truncate:SF (float_extend:XF foo:DF))
         = (float_truncate:SF foo:DF).

         (float_truncate:DF (float_extend:XF foo:SF))
         = (float_extend:DF foo:SF).  */
4039      if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
4040	   && flag_unsafe_math_optimizations)
4041	  || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
4042	return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
4043							    0)))
4044				   > GET_MODE_SIZE (mode)
4045				   ? FLOAT_TRUNCATE : FLOAT_EXTEND,
4046				   mode,
4047				   XEXP (XEXP (x, 0), 0), mode);
4048
      /* (float_truncate (float x)) is (float x).  */
4050      if (GET_CODE (XEXP (x, 0)) == FLOAT
4051	  && (flag_unsafe_math_optimizations
4052	      || ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4053		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4054		      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4055					     GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4056	return simplify_gen_unary (FLOAT, mode,
4057				   XEXP (XEXP (x, 0), 0),
4058				   GET_MODE (XEXP (XEXP (x, 0), 0)));
4059
      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
	 (OP:SF foo:SF) if OP is NEG or ABS.  */
4062      if ((GET_CODE (XEXP (x, 0)) == ABS
4063	   || GET_CODE (XEXP (x, 0)) == NEG)
4064	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4065	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4066	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4067				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4068
      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
	 is (float_truncate:SF X).  */
4071      if (GET_CODE (XEXP (x, 0)) == SUBREG
4072	  && subreg_lowpart_p (XEXP (x, 0))
4073	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4074	return SUBREG_REG (XEXP (x, 0));
4075      break;
4076    case FLOAT_EXTEND:
      /* (float_extend (float_extend x)) is (float_extend x).

	 (float_extend (float x)) is (float x), assuming that double
	 rounding can't happen.  */
4082      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4083	  || (GET_CODE (XEXP (x, 0)) == FLOAT
4084	      && ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
4085		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
4086		      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
4087					     GET_MODE (XEXP (XEXP (x, 0), 0)))))))
4088	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4089				   XEXP (XEXP (x, 0), 0),
4090				   GET_MODE (XEXP (XEXP (x, 0), 0)));
4091
4092      break;
4093#ifdef HAVE_cc0
4094    case COMPARE:
      /* When using cc0, convert (compare FOO (const_int 0)) to FOO.
	 Without cc0 we leave it as a COMPARE so that it can be
	 distinguished from a register-register copy.  */
4098      if (XEXP (x, 1) == const0_rtx)
4099	return XEXP (x, 0);
4100
4101      /* x - 0 is the same as x unless x's mode has signed zeros and
4102	 allows rounding towards -infinity.  Under those conditions,
4103	 0 - 0 is -0.  */
4104      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4105	    && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4106	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4107	return XEXP (x, 0);
4108      break;
4109#endif
4110
4111    case CONST:
4112      /* (const (const X)) can become (const X).  Do it this way rather than
4113	 returning the inner CONST since CONST can be shared with a
4114	 REG_EQUAL note.  */
4115      if (GET_CODE (XEXP (x, 0)) == CONST)
4116	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4117      break;
4118
4119#ifdef HAVE_lo_sum
4120    case LO_SUM:
4121      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
4122	 can add in an offset.  find_split_point will split this address up
4123	 again if it doesn't match.  */
4124      if (GET_CODE (XEXP (x, 0)) == HIGH
4125	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4126	return XEXP (x, 1);
4127      break;
4128#endif
4129
4130    case PLUS:
      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
4133      if (GET_CODE (XEXP (x, 0)) == MULT
4134	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4135	{
4136	  rtx in1, in2;
4137
4138	  in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4139	  in2 = XEXP (XEXP (x, 0), 1);
4140	  return gen_binary (MINUS, mode, XEXP (x, 1),
4141			     gen_binary (MULT, mode, in1, in2));
4142	}
4143
      /* If we have (plus (plus A const) B), associate it so that the
	 constant is outermost.  That's because that's the way indexed
	 addresses are supposed to appear.  This code used to check many
	 more cases, but they are now checked elsewhere.  */
4148      if (GET_CODE (XEXP (x, 0)) == PLUS
4149	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4150	return gen_binary (PLUS, mode,
4151			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4152				       XEXP (x, 1)),
4153			   XEXP (XEXP (x, 0), 1));
4154
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
	 when c is (const_int pow2 / 2) is a sign extension of a
	 bit-field and can be replaced by either a sign_extend or a
	 sign_extract.  The `and' may be a zero_extend and the two
	 <c>, -<c> constants may be reversed.  */
4160      if (GET_CODE (XEXP (x, 0)) == XOR
4161	  && GET_CODE (XEXP (x, 1)) == CONST_INT
4162	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4163	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4164	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4165	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4166	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4167	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4168	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4169	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4170		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4171	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4172		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4173		      == (unsigned int) i + 1))))
4174	return simplify_shift_const
4175	  (NULL_RTX, ASHIFTRT, mode,
4176	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4177				 XEXP (XEXP (XEXP (x, 0), 0), 0),
4178				 GET_MODE_BITSIZE (mode) - (i + 1)),
4179	   GET_MODE_BITSIZE (mode) - (i + 1));
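
      /* For example, in SImode with an 8-bit field (i = 7),
	     (plus (xor (and X (const_int 255)) (const_int 128))
		   (const_int -128))
	 computes ((X & 255) ^ 128) - 128, which sign-extends the low
	 byte of X, and is rewritten as
	     (ashiftrt (ashift X (const_int 24)) (const_int 24)).  */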
4180
4181      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4182	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4183	 is 1.  This produces better code than the alternative immediately
4184	 below.  */
4185      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4186	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4187	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4188	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
4189					      XEXP (XEXP (x, 0), 0),
4190					      XEXP (XEXP (x, 0), 1))))
4191	return
4192	  simplify_gen_unary (NEG, mode, reversed, mode);
4193
4194      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4195	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4196	 the bitsize of the mode - 1.  This allows simplification of
4197	 "a = (b & 8) == 0;"  */
4198      if (XEXP (x, 1) == constm1_rtx
4199	  && GET_CODE (XEXP (x, 0)) != REG
4200	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4201		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4202	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4203	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4204	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4205				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4206				 GET_MODE_BITSIZE (mode) - 1),
4207	   GET_MODE_BITSIZE (mode) - 1);
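
      /* E.g. if X is known to be 0 or 1, X - 1 is 0 or -1, which equals
	 (xor X 1) with its low bit replicated across the word, so in
	 SImode this becomes
	     (ashiftrt (ashift (xor X (const_int 1)) (const_int 31))
		       (const_int 31)).  */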
4208
4209      /* If we are adding two things that have no bits in common, convert
4210	 the addition into an IOR.  This will often be further simplified,
4211	 for example in cases like ((a & 1) + (a & 2)), which can
4212	 become a & 3.  */
4213
4214      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4215	  && (nonzero_bits (XEXP (x, 0), mode)
4216	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4217	{
4218	  /* Try to simplify the expression further.  */
4219	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4220	  temp = combine_simplify_rtx (tor, mode, last, in_dest);
4221
4222	  /* If we could, great.  If not, do not go ahead with the IOR
4223	     replacement, since PLUS appears in many special purpose
4224	     address arithmetic instructions.  */
4225	  if (GET_CODE (temp) != CLOBBER && temp != tor)
4226	    return temp;
4227	}
4228      break;
4229
4230    case MINUS:
4231      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4232	 by reversing the comparison code if valid.  */
4233      if (STORE_FLAG_VALUE == 1
4234	  && XEXP (x, 0) == const1_rtx
4235	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4236	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
4237					      XEXP (XEXP (x, 1), 0),
4238					      XEXP (XEXP (x, 1), 1))))
4239	return reversed;
4240
4241      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4242	 (and <foo> (const_int pow2-1))  */
4243      if (GET_CODE (XEXP (x, 1)) == AND
4244	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4245	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4246	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4247	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4248				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
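
      /* E.g. (minus X (and X (const_int -8))) becomes
	 (and X (const_int 7)): subtracting off the high bits of X
	 leaves only the low three bits.  */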
4249
      /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
4252      if (GET_CODE (XEXP (x, 1)) == MULT
4253	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4254	{
4255	  rtx in1, in2;
4256
4257	  in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4258	  in2 = XEXP (XEXP (x, 1), 1);
4259	  return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2),
4260			     XEXP (x, 0));
4261	}
4262
4263      /* Canonicalize (minus (neg A) (mult B C)) to
4264	 (minus (mult (neg B) C) A).  */
4265      if (GET_CODE (XEXP (x, 1)) == MULT
4266	  && GET_CODE (XEXP (x, 0)) == NEG)
4267	{
4268	  rtx in1, in2;
4269
4270	  in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4271	  in2 = XEXP (XEXP (x, 1), 1);
4272	  return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2),
4273			     XEXP (XEXP (x, 0), 0));
4274	}
4275
4276      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4277	 integers.  */
4278      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4279	return gen_binary (MINUS, mode,
4280			   gen_binary (MINUS, mode, XEXP (x, 0),
4281				       XEXP (XEXP (x, 1), 0)),
4282			   XEXP (XEXP (x, 1), 1));
4283      break;
4284
4285    case MULT:
4286      /* If we have (mult (plus A B) C), apply the distributive law and then
4287	 the inverse distributive law to see if things simplify.  This
4288	 occurs mostly in addresses, often when unrolling loops.  */
4289
4290      if (GET_CODE (XEXP (x, 0)) == PLUS)
4291	{
4292	  x = apply_distributive_law
4293	    (gen_binary (PLUS, mode,
4294			 gen_binary (MULT, mode,
4295				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4296			 gen_binary (MULT, mode,
4297				     XEXP (XEXP (x, 0), 1),
4298				     copy_rtx (XEXP (x, 1)))));
4299
4300	  if (GET_CODE (x) != MULT)
4301	    return x;
4302	}
      /* Try to simplify a*(b/c) as (a*b)/c.  */
4304      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4305	  && GET_CODE (XEXP (x, 0)) == DIV)
4306	{
4307	  rtx tem = simplify_binary_operation (MULT, mode,
4308					       XEXP (XEXP (x, 0), 0),
4309					       XEXP (x, 1));
4310	  if (tem)
4311	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4312	}
4313      break;
4314
4315    case UDIV:
4316      /* If this is a divide by a power of two, treat it as a shift if
4317	 its first operand is a shift.  */
4318      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4319	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4320	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4321	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4322	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4323	      || GET_CODE (XEXP (x, 0)) == ROTATE
4324	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4325	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
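
      /* E.g. (udiv (ashift X (const_int 3)) (const_int 8)) is handled
	 as a logical right shift of the ASHIFT by 3, which
	 simplify_shift_const may then reduce further, here to an AND
	 masking off the high three bits.  */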
4326      break;
4327
4328    case EQ:  case NE:
4329    case GT:  case GTU:  case GE:  case GEU:
4330    case LT:  case LTU:  case LE:  case LEU:
4331    case UNEQ:  case LTGT:
4332    case UNGT:  case UNGE:
4333    case UNLT:  case UNLE:
4334    case UNORDERED: case ORDERED:
4335      /* If the first operand is a condition code, we can't do anything
4336	 with it.  */
4337      if (GET_CODE (XEXP (x, 0)) == COMPARE
4338	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4339	      && ! CC0_P (XEXP (x, 0))))
4340	{
4341	  rtx op0 = XEXP (x, 0);
4342	  rtx op1 = XEXP (x, 1);
4343	  enum rtx_code new_code;
4344
4345	  if (GET_CODE (op0) == COMPARE)
4346	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4347
4348	  /* Simplify our comparison, if possible.  */
4349	  new_code = simplify_comparison (code, &op0, &op1);
4350
4351	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4352	     if only the low-order bit is possibly nonzero in X (such as when
4353	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4354	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4355	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4356	     (plus X 1).
4357
4358	     Remove any ZERO_EXTRACT we made when thinking this was a
4359	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4360	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4361	     the call to make_compound_operation in the SET case.  */
4362
4363	  if (STORE_FLAG_VALUE == 1
4364	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4365	      && op1 == const0_rtx
4366	      && mode == GET_MODE (op0)
4367	      && nonzero_bits (op0, mode) == 1)
4368	    return gen_lowpart_for_combine (mode,
4369					    expand_compound_operation (op0));
4370
4371	  else if (STORE_FLAG_VALUE == 1
4372		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4373		   && op1 == const0_rtx
4374		   && mode == GET_MODE (op0)
4375		   && (num_sign_bit_copies (op0, mode)
4376		       == GET_MODE_BITSIZE (mode)))
4377	    {
4378	      op0 = expand_compound_operation (op0);
4379	      return simplify_gen_unary (NEG, mode,
4380					 gen_lowpart_for_combine (mode, op0),
4381					 mode);
4382	    }
4383
4384	  else if (STORE_FLAG_VALUE == 1
4385		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4386		   && op1 == const0_rtx
4387		   && mode == GET_MODE (op0)
4388		   && nonzero_bits (op0, mode) == 1)
4389	    {
4390	      op0 = expand_compound_operation (op0);
4391	      return gen_binary (XOR, mode,
4392				 gen_lowpart_for_combine (mode, op0),
4393				 const1_rtx);
4394	    }
4395
4396	  else if (STORE_FLAG_VALUE == 1
4397		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4398		   && op1 == const0_rtx
4399		   && mode == GET_MODE (op0)
4400		   && (num_sign_bit_copies (op0, mode)
4401		       == GET_MODE_BITSIZE (mode)))
4402	    {
4403	      op0 = expand_compound_operation (op0);
4404	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4405	    }
4406
4407	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4408	     those above.  */
4409	  if (STORE_FLAG_VALUE == -1
4410	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4411	      && op1 == const0_rtx
4412	      && (num_sign_bit_copies (op0, mode)
4413		  == GET_MODE_BITSIZE (mode)))
4414	    return gen_lowpart_for_combine (mode,
4415					    expand_compound_operation (op0));
4416
4417	  else if (STORE_FLAG_VALUE == -1
4418		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4419		   && op1 == const0_rtx
4420		   && mode == GET_MODE (op0)
4421		   && nonzero_bits (op0, mode) == 1)
4422	    {
4423	      op0 = expand_compound_operation (op0);
4424	      return simplify_gen_unary (NEG, mode,
4425					 gen_lowpart_for_combine (mode, op0),
4426					 mode);
4427	    }
4428
4429	  else if (STORE_FLAG_VALUE == -1
4430		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4431		   && op1 == const0_rtx
4432		   && mode == GET_MODE (op0)
4433		   && (num_sign_bit_copies (op0, mode)
4434		       == GET_MODE_BITSIZE (mode)))
4435	    {
4436	      op0 = expand_compound_operation (op0);
4437	      return simplify_gen_unary (NOT, mode,
4438					 gen_lowpart_for_combine (mode, op0),
4439					 mode);
4440	    }
4441
4442	  /* If X is 0/1, (eq X 0) is X-1.  */
4443	  else if (STORE_FLAG_VALUE == -1
4444		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4445		   && op1 == const0_rtx
4446		   && mode == GET_MODE (op0)
4447		   && nonzero_bits (op0, mode) == 1)
4448	    {
4449	      op0 = expand_compound_operation (op0);
4450	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4451	    }
4452
4453	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4454	     one bit that might be nonzero, we can convert (ne X 0) to
4455	     (ashift X C) where C puts the bit in the sign bit.  Remove any
4456	     AND with STORE_FLAG_VALUE when we are done, since we are only
4457	     going to test the sign bit.  */
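	  /* For example, in a 32-bit mode with STORE_FLAG_VALUE equal to
	     0x80000000, (ne (and X 16) 0) becomes (ashift (and X 16) 27),
	     moving bit 4 into the sign-bit position.  */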
4458	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4459	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4460	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4461		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4462	      && op1 == const0_rtx
4463	      && mode == GET_MODE (op0)
4464	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4465	    {
4466	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4467					expand_compound_operation (op0),
4468					GET_MODE_BITSIZE (mode) - 1 - i);
4469	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4470		return XEXP (x, 0);
4471	      else
4472		return x;
4473	    }
4474
4475	  /* If the code changed, return a whole new comparison.  */
4476	  if (new_code != code)
4477	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4478
4479	  /* Otherwise, keep this operation, but maybe change its operands.
4480	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4481	  SUBST (XEXP (x, 0), op0);
4482	  SUBST (XEXP (x, 1), op1);
4483	}
4484      break;
4485
4486    case IF_THEN_ELSE:
4487      return simplify_if_then_else (x);
4488
4489    case ZERO_EXTRACT:
4490    case SIGN_EXTRACT:
4491    case ZERO_EXTEND:
4492    case SIGN_EXTEND:
4493      /* If we are processing SET_DEST, we are done.  */
4494      if (in_dest)
4495	return x;
4496
4497      return expand_compound_operation (x);
4498
4499    case SET:
4500      return simplify_set (x);
4501
4502    case AND:
4503    case IOR:
4504    case XOR:
4505      return simplify_logical (x, last);
4506
4507    case ABS:
4508      /* (abs (neg <foo>)) -> (abs <foo>) */
4509      if (GET_CODE (XEXP (x, 0)) == NEG)
4510	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4511
4512      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4513         do nothing.  */
4514      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4515	break;
4516
4517      /* If operand is something known to be positive, ignore the ABS.  */
4518      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4519	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4520	       <= HOST_BITS_PER_WIDE_INT)
4521	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4522		   & ((HOST_WIDE_INT) 1
4523		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4524		  == 0)))
4525	return XEXP (x, 0);
4526
4527      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4528      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4529	return gen_rtx_NEG (mode, XEXP (x, 0));
4530
4531      break;
4532
4533    case FFS:
4534      /* (ffs (*_extend <X>)) = (ffs <X>) */
4535      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4536	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4537	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4538      break;
4539
4540    case POPCOUNT:
4541    case PARITY:
4542      /* (pop* (zero_extend <X>)) = (pop* <X>) */
4543      if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4544	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4545      break;
4546
4547    case FLOAT:
4548      /* (float (sign_extend <X>)) = (float <X>).  */
4549      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4550	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4551      break;
4552
4553    case ASHIFT:
4554    case LSHIFTRT:
4555    case ASHIFTRT:
4556    case ROTATE:
4557    case ROTATERT:
4558      /* If this is a shift by a constant amount, simplify it.  */
4559      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4560	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4561				     INTVAL (XEXP (x, 1)));
4562
4563      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4564	SUBST (XEXP (x, 1),
4565	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4566			      ((HOST_WIDE_INT) 1
4567			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4568			      - 1,
4569			      NULL_RTX, 0));
4570      break;
4571
4572    case VEC_SELECT:
4573      {
4574	rtx op0 = XEXP (x, 0);
4575	rtx op1 = XEXP (x, 1);
4576	int len;
4577
4578	if (GET_CODE (op1) != PARALLEL)
4579	  abort ();
4580	len = XVECLEN (op1, 0);
4581	if (len == 1
4582	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4583	    && GET_CODE (op0) == VEC_CONCAT)
4584	  {
4585	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4586
4587	    /* Try to find the element in the VEC_CONCAT.  */
4588	    for (;;)
4589	      {
4590		if (GET_MODE (op0) == GET_MODE (x))
4591		  return op0;
4592		if (GET_CODE (op0) == VEC_CONCAT)
4593		  {
4594		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4595		    if (offset < op0_size)
4596		      op0 = XEXP (op0, 0);
4597		    else
4598		      {
4599			offset -= op0_size;
4600			op0 = XEXP (op0, 1);
4601		      }
4602		  }
4603		else
4604		  break;
4605	      }
4606	  }
4607      }
4608
4609      break;
4610
4611    default:
4612      break;
4613    }
4614
4615  return x;
4616}
4617
4618/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4619
4620static rtx
4621simplify_if_then_else (rtx x)
4622{
4623  enum machine_mode mode = GET_MODE (x);
4624  rtx cond = XEXP (x, 0);
4625  rtx true_rtx = XEXP (x, 1);
4626  rtx false_rtx = XEXP (x, 2);
4627  enum rtx_code true_code = GET_CODE (cond);
4628  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4629  rtx temp;
4630  int i;
4631  enum rtx_code false_code;
4632  rtx reversed;
4633
4634  /* Simplify storing of the truth value.  */
4635  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4636    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4637
4638  /* Also when the truth value has to be reversed.  */
4639  if (comparison_p
4640      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4641      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4642					  XEXP (cond, 1))))
4643    return reversed;
4644
4645  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4646     in it is being compared against certain values.  Get the true and false
4647     comparisons and see if that says anything about the value of each arm.  */
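  /* For example, given (if_then_else (eq R 0) (plus R 4) R) where
     nonzero_bits (R) == 8, R must be 0 in the true arm and 8 in the
     false arm, so the arms simplify to 4 and 8 respectively.  */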
4648
4649  if (comparison_p
4650      && ((false_code = combine_reversed_comparison_code (cond))
4651	  != UNKNOWN)
4652      && GET_CODE (XEXP (cond, 0)) == REG)
4653    {
4654      HOST_WIDE_INT nzb;
4655      rtx from = XEXP (cond, 0);
4656      rtx true_val = XEXP (cond, 1);
4657      rtx false_val = true_val;
4658      int swapped = 0;
4659
4660      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4661
4662      if (false_code == EQ)
4663	{
4664	  swapped = 1, true_code = EQ, false_code = NE;
4665	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4666	}
4667
4668      /* If we are comparing against zero and the expression being tested has
4669	 only a single bit that might be nonzero, that is its value when it is
4670	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4671
4672      if (true_code == EQ && true_val == const0_rtx
4673	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4674	false_code = EQ, false_val = GEN_INT (nzb);
4675      else if (true_code == EQ && true_val == const0_rtx
4676	       && (num_sign_bit_copies (from, GET_MODE (from))
4677		   == GET_MODE_BITSIZE (GET_MODE (from))))
4678	false_code = EQ, false_val = constm1_rtx;
4679
4680      /* Now simplify an arm if we know the value of the register in the
4681	 branch and it is used in the arm.  Be careful due to the potential
4682	 of locally-shared RTL.  */
4683
4684      if (reg_mentioned_p (from, true_rtx))
4685	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4686				      from, true_val),
4687		      pc_rtx, pc_rtx, 0, 0);
4688      if (reg_mentioned_p (from, false_rtx))
4689	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4690				   from, false_val),
4691		       pc_rtx, pc_rtx, 0, 0);
4692
4693      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4694      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4695
4696      true_rtx = XEXP (x, 1);
4697      false_rtx = XEXP (x, 2);
4698      true_code = GET_CODE (cond);
4699    }
4700
4701  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4702     reversed, do so to avoid needing two sets of patterns for
4703     subtract-and-branch insns.  Similarly, reverse if the true arm is a
4704     constant, if the false arm is the same as the first operand of the
4705     comparison, or if the false arm is more complicated than the true arm.  */
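  /* For example, (if_then_else (ge A B) (pc) (label_ref BAR)) becomes
     (if_then_else (lt A B) (label_ref BAR) (pc)), assuming the
     comparison can be reversed.  */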
4706
4707  if (comparison_p
4708      && combine_reversed_comparison_code (cond) != UNKNOWN
4709      && (true_rtx == pc_rtx
4710	  || (CONSTANT_P (true_rtx)
4711	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4712	  || true_rtx == const0_rtx
4713	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4714	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4715	  || (GET_CODE (true_rtx) == SUBREG
4716	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4717	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4718	  || reg_mentioned_p (true_rtx, false_rtx)
4719	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4720    {
4721      true_code = reversed_comparison_code (cond, NULL);
4722      SUBST (XEXP (x, 0),
4723	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4724				  XEXP (cond, 1)));
4725
4726      SUBST (XEXP (x, 1), false_rtx);
4727      SUBST (XEXP (x, 2), true_rtx);
4728
4729      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4730      cond = XEXP (x, 0);
4731
4732      /* It is possible that the conditional has been simplified out.  */
4733      true_code = GET_CODE (cond);
4734      comparison_p = GET_RTX_CLASS (true_code) == '<';
4735    }
4736
4737  /* If the two arms are identical, we don't need the comparison.  */
4738
4739  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4740    return true_rtx;
4741
4742  /* Convert a == b ? b : a to "a".  */
4743  if (true_code == EQ && ! side_effects_p (cond)
4744      && !HONOR_NANS (mode)
4745      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4746      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4747    return false_rtx;
4748  else if (true_code == NE && ! side_effects_p (cond)
4749	   && !HONOR_NANS (mode)
4750	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4751	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4752    return true_rtx;
4753
4754  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4755
4756  if (GET_MODE_CLASS (mode) == MODE_INT
4757      && GET_CODE (false_rtx) == NEG
4758      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4759      && comparison_p
4760      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4761      && ! side_effects_p (true_rtx))
4762    switch (true_code)
4763      {
4764      case GT:
4765      case GE:
4766	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4767      case LT:
4768      case LE:
4769	return
4770	  simplify_gen_unary (NEG, mode,
4771			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4772			      mode);
4773      default:
4774	break;
4775      }
4776
4777  /* Look for MIN or MAX.  */
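  /* For example, (if_then_else (gtu A B) A B) becomes (umax A B) and
     (if_then_else (lt A B) A B) becomes (smin A B).  */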
4778
4779  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4780      && comparison_p
4781      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4782      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4783      && ! side_effects_p (cond))
4784    switch (true_code)
4785      {
4786      case GE:
4787      case GT:
4788	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4789      case LE:
4790      case LT:
4791	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4792      case GEU:
4793      case GTU:
4794	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4795      case LEU:
4796      case LTU:
4797	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4798      default:
4799	break;
4800      }
4801
4802  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4803     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4804     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4805     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4806     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4807     neither 1 nor -1, but it isn't worth checking for.  */
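  /* For example, with STORE_FLAG_VALUE == 1,
     (if_then_else (eq A B) (plus Z 4) Z) can become
     (plus Z (mult (eq A B) 4)); the multiply is 4 when the EQ holds
     and 0 when it does not.  */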
4808
4809  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4810      && comparison_p
4811      && GET_MODE_CLASS (mode) == MODE_INT
4812      && ! side_effects_p (x))
4813    {
4814      rtx t = make_compound_operation (true_rtx, SET);
4815      rtx f = make_compound_operation (false_rtx, SET);
4816      rtx cond_op0 = XEXP (cond, 0);
4817      rtx cond_op1 = XEXP (cond, 1);
4818      enum rtx_code op = NIL, extend_op = NIL;
4819      enum machine_mode m = mode;
4820      rtx z = 0, c1 = NULL_RTX;
4821
4822      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4823	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4824	   || GET_CODE (t) == ASHIFT
4825	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4826	  && rtx_equal_p (XEXP (t, 0), f))
4827	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4828
4829      /* If an identity-zero op is commutative, check whether there
4830	 would be a match if we swapped the operands.  */
4831      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4832		|| GET_CODE (t) == XOR)
4833	       && rtx_equal_p (XEXP (t, 1), f))
4834	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4835      else if (GET_CODE (t) == SIGN_EXTEND
4836	       && (GET_CODE (XEXP (t, 0)) == PLUS
4837		   || GET_CODE (XEXP (t, 0)) == MINUS
4838		   || GET_CODE (XEXP (t, 0)) == IOR
4839		   || GET_CODE (XEXP (t, 0)) == XOR
4840		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4841		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4842		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4843	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4844	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4845	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4846	       && (num_sign_bit_copies (f, GET_MODE (f))
4847		   > (unsigned int)
4848		     (GET_MODE_BITSIZE (mode)
4849		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4850	{
4851	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4852	  extend_op = SIGN_EXTEND;
4853	  m = GET_MODE (XEXP (t, 0));
4854	}
4855      else if (GET_CODE (t) == SIGN_EXTEND
4856	       && (GET_CODE (XEXP (t, 0)) == PLUS
4857		   || GET_CODE (XEXP (t, 0)) == IOR
4858		   || GET_CODE (XEXP (t, 0)) == XOR)
4859	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4860	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4861	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4862	       && (num_sign_bit_copies (f, GET_MODE (f))
4863		   > (unsigned int)
4864		     (GET_MODE_BITSIZE (mode)
4865		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4866	{
4867	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4868	  extend_op = SIGN_EXTEND;
4869	  m = GET_MODE (XEXP (t, 0));
4870	}
4871      else if (GET_CODE (t) == ZERO_EXTEND
4872	       && (GET_CODE (XEXP (t, 0)) == PLUS
4873		   || GET_CODE (XEXP (t, 0)) == MINUS
4874		   || GET_CODE (XEXP (t, 0)) == IOR
4875		   || GET_CODE (XEXP (t, 0)) == XOR
4876		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4877		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4878		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4879	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4880	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4881	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4882	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4883	       && ((nonzero_bits (f, GET_MODE (f))
4884		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4885		   == 0))
4886	{
4887	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4888	  extend_op = ZERO_EXTEND;
4889	  m = GET_MODE (XEXP (t, 0));
4890	}
4891      else if (GET_CODE (t) == ZERO_EXTEND
4892	       && (GET_CODE (XEXP (t, 0)) == PLUS
4893		   || GET_CODE (XEXP (t, 0)) == IOR
4894		   || GET_CODE (XEXP (t, 0)) == XOR)
4895	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4896	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4897	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4898	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4899	       && ((nonzero_bits (f, GET_MODE (f))
4900		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4901		   == 0))
4902	{
4903	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4904	  extend_op = ZERO_EXTEND;
4905	  m = GET_MODE (XEXP (t, 0));
4906	}
4907
4908      if (z)
4909	{
4910	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4911			pc_rtx, pc_rtx, 0, 0);
4912	  temp = gen_binary (MULT, m, temp,
4913			     gen_binary (MULT, m, c1, const_true_rtx));
4914	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4915	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4916
4917	  if (extend_op != NIL)
4918	    temp = simplify_gen_unary (extend_op, mode, temp, m);
4919
4920	  return temp;
4921	}
4922    }
4923
4924  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4925     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4926     negation of a single bit, we can convert this operation to a shift.  We
4927     can actually do this more generally, but it doesn't seem worth it.  */
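  /* For example, (if_then_else (ne A 0) 8 0) where nonzero_bits (A) == 1
     becomes (ashift A 3).  */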
4928
4929  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4930      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4931      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4932	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4933	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4934	       == GET_MODE_BITSIZE (mode))
4935	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4936    return
4937      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4938			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4939
4940  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
4941  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4942      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4943      && GET_MODE (XEXP (cond, 0)) == mode
4944      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
4945	  == nonzero_bits (XEXP (cond, 0), mode)
4946      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
4947    return XEXP (cond, 0);
4948
4949  return x;
4950}
4951
4952/* Simplify X, a SET expression.  Return the new expression.  */
4953
4954static rtx
4955simplify_set (rtx x)
4956{
4957  rtx src = SET_SRC (x);
4958  rtx dest = SET_DEST (x);
4959  enum machine_mode mode
4960    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4961  rtx other_insn;
4962  rtx *cc_use;
4963
4964  /* (set (pc) (return)) gets written as (return).  */
4965  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4966    return src;
4967
4968  /* Now that we know for sure which bits of SRC we are using, see if we can
4969     simplify the expression for the object knowing that we only need the
4970     low-order bits.  */
4971
4972  if (GET_MODE_CLASS (mode) == MODE_INT
4973      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4974    {
4975      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4976      SUBST (SET_SRC (x), src);
4977    }
4978
4979  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4980     the comparison result and try to simplify it unless we already have used
4981     undobuf.other_insn.  */
4982  if ((GET_MODE_CLASS (mode) == MODE_CC
4983       || GET_CODE (src) == COMPARE
4984       || CC0_P (dest))
4985      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4986      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4987      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4988      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4989    {
4990      enum rtx_code old_code = GET_CODE (*cc_use);
4991      enum rtx_code new_code;
4992      rtx op0, op1, tmp;
4993      int other_changed = 0;
4994      enum machine_mode compare_mode = GET_MODE (dest);
4995      enum machine_mode tmp_mode;
4996
4997      if (GET_CODE (src) == COMPARE)
4998	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4999      else
5000	op0 = src, op1 = const0_rtx;
5001
5002      /* Check whether the comparison is known at compile time.  */
5003      if (GET_MODE (op0) != VOIDmode)
5004	tmp_mode = GET_MODE (op0);
5005      else if (GET_MODE (op1) != VOIDmode)
5006	tmp_mode = GET_MODE (op1);
5007      else
5008	tmp_mode = compare_mode;
5009      tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1);
5010      if (tmp != NULL_RTX)
5011	{
5012	  rtx pat = PATTERN (other_insn);
5013	  undobuf.other_insn = other_insn;
5014	  SUBST (*cc_use, tmp);
5015
5016	  /* Attempt to simplify CC user.  */
5017	  if (GET_CODE (pat) == SET)
5018	    {
5019	      rtx new = simplify_rtx (SET_SRC (pat));
5020	      if (new != NULL_RTX)
5021		SUBST (SET_SRC (pat), new);
5022	    }
5023
5024	  /* Convert X into a no-op move.  */
5025	  SUBST (SET_DEST (x), pc_rtx);
5026	  SUBST (SET_SRC (x), pc_rtx);
5027	  return x;
5028	}
5029
5030      /* Simplify our comparison, if possible.  */
5031      new_code = simplify_comparison (old_code, &op0, &op1);
5032
5033#ifdef SELECT_CC_MODE
5034      /* If this machine has CC modes other than CCmode, check to see if we
5035	 need to use a different CC mode here.  */
5036      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5037
5038#ifndef HAVE_cc0
5039      /* If the mode changed, we have to change SET_DEST, the mode in the
5040	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5041	 a hard register, just build new versions with the proper mode.  If it
5042	 is a pseudo, we lose unless it is only time we set the pseudo, in
5043	 is a pseudo, we lose unless it is the only time we set the pseudo, in
5044      if (compare_mode != GET_MODE (dest))
5045	{
5046	  unsigned int regno = REGNO (dest);
5047	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
5048
5049	  if (regno < FIRST_PSEUDO_REGISTER
5050	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5051	    {
5052	      if (regno >= FIRST_PSEUDO_REGISTER)
5053		SUBST (regno_reg_rtx[regno], new_dest);
5054
5055	      SUBST (SET_DEST (x), new_dest);
5056	      SUBST (XEXP (*cc_use, 0), new_dest);
5057	      other_changed = 1;
5058
5059	      dest = new_dest;
5060	    }
5061	}
5062#endif  /* cc0 */
5063#endif  /* SELECT_CC_MODE */
5064
5065      /* If the code changed, we have to build a new comparison in
5066	 undobuf.other_insn.  */
5067      if (new_code != old_code)
5068	{
5069	  int other_changed_previously = other_changed;
5070	  unsigned HOST_WIDE_INT mask;
5071
5072	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5073					  dest, const0_rtx));
5074	  other_changed = 1;
5075
5076	  /* If the only change we made was to change an EQ into an NE or
5077	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5078	     is zero, check if changing the user of the condition code will
5079	     produce a valid insn.  If it won't, we can keep the original code
5080	     in that insn by surrounding our operation with an XOR.  */
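	  /* Since OP0 is known to be either 0 or MASK, XORing it with MASK
	     swaps those two values, so testing (xor OP0 MASK) with the old
	     code is equivalent to testing OP0 with the new one.  */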
5081
5082	  if (((old_code == NE && new_code == EQ)
5083	       || (old_code == EQ && new_code == NE))
5084	      && ! other_changed_previously && op1 == const0_rtx
5085	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5086	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5087	    {
5088	      rtx pat = PATTERN (other_insn), note = 0;
5089
5090	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5091		   && ! check_asm_operands (pat)))
5092		{
5093		  PUT_CODE (*cc_use, old_code);
5094		  other_changed = 0;
5095
5096		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5097		}
5098	    }
5099	}
5100
5101      if (other_changed)
5102	undobuf.other_insn = other_insn;
5103
5104#ifdef HAVE_cc0
5105      /* If we are now comparing against zero, change our source if
5106	 needed.  If we do not use cc0, we always have a COMPARE.  */
5107      if (op1 == const0_rtx && dest == cc0_rtx)
5108	{
5109	  SUBST (SET_SRC (x), op0);
5110	  src = op0;
5111	}
5112      else
5113#endif
5114
5115      /* Otherwise, if we didn't previously have a COMPARE in the
5116	 correct mode, we need one.  */
5117      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5118	{
5119	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5120	  src = SET_SRC (x);
5121	}
5122      else
5123	{
5124	  /* Otherwise, update the COMPARE if needed.  */
5125	  SUBST (XEXP (src, 0), op0);
5126	  SUBST (XEXP (src, 1), op1);
5127	}
5128    }
5129  else
5130    {
5131      /* Get SET_SRC in a form where we have placed back any
5132	 compound expressions.  Then do the checks below.  */
5133      src = make_compound_operation (src, SET);
5134      SUBST (SET_SRC (x), src);
5135    }
5136
5137  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5138     and X being a REG or (subreg (reg)), we may be able to convert this to
5139     (set (subreg:m2 x) (op)).
5140
5141     We can always do this if M1 is narrower than M2 because that means that
5142     we only care about the low bits of the result.
5143
5144     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5145     perform a narrower operation than requested since the high-order bits will
5146     be undefined.  On machines where it is defined, this transformation is safe
5147     as long as M1 and M2 have the same number of words.  */
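  /* For example, (set X:QI (subreg:QI (plus:SI A B) 0)) can become,
     roughly, (set (subreg:SI X 0) (plus:SI A B)), since only the low
     byte of the result is needed.  */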
5148
5149  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5150      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5151      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5152	   / UNITS_PER_WORD)
5153	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5154	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5155#ifndef WORD_REGISTER_OPERATIONS
5156      && (GET_MODE_SIZE (GET_MODE (src))
5157        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5158#endif
5159#ifdef CANNOT_CHANGE_MODE_CLASS
5160      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5161	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5162					 GET_MODE (SUBREG_REG (src)),
5163					 GET_MODE (src)))
5164#endif
5165      && (GET_CODE (dest) == REG
5166	  || (GET_CODE (dest) == SUBREG
5167	      && GET_CODE (SUBREG_REG (dest)) == REG)))
5168    {
5169      SUBST (SET_DEST (x),
5170	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5171				      dest));
5172      SUBST (SET_SRC (x), SUBREG_REG (src));
5173
5174      src = SET_SRC (x), dest = SET_DEST (x);
5175    }
5176
5177#ifdef HAVE_cc0
5178  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5179     in SRC.  */
5180  if (dest == cc0_rtx
5181      && GET_CODE (src) == SUBREG
5182      && subreg_lowpart_p (src)
5183      && (GET_MODE_BITSIZE (GET_MODE (src))
5184	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5185    {
5186      rtx inner = SUBREG_REG (src);
5187      enum machine_mode inner_mode = GET_MODE (inner);
5188
5189      /* Here we make sure that the sign bit cannot be set.  */
5190      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5191	  && (nonzero_bits (inner, inner_mode)
5192	      < ((unsigned HOST_WIDE_INT) 1
5193		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5194	{
5195	  SUBST (SET_SRC (x), inner);
5196	  src = SET_SRC (x);
5197	}
5198    }
5199#endif
5200
5201#ifdef LOAD_EXTEND_OP
5202  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5203     would require a paradoxical subreg.  Replace the subreg with a
5204     zero_extend to avoid the reload that would otherwise be required.  */
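  /* For example, if HImode loads are always zero-extended,
     (set FOO:SI (subreg:SI (mem:HI BAR) 0)) becomes
     (set FOO:SI (zero_extend:SI (mem:HI BAR))).  */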
5205
5206  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5207      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5208      && SUBREG_BYTE (src) == 0
5209      && (GET_MODE_SIZE (GET_MODE (src))
5210	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5211      && GET_CODE (SUBREG_REG (src)) == MEM)
5212    {
5213      SUBST (SET_SRC (x),
5214	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5215		      GET_MODE (src), SUBREG_REG (src)));
5216
5217      src = SET_SRC (x);
5218    }
5219#endif
5220
5221  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5222     are comparing an item known to be 0 or -1 against 0, use a logical
5223     operation instead. Check for one of the arms being an IOR of the other
5224     arm with some value.  We compute three terms to be IOR'ed together.  In
5225     practice, at most two will be nonzero.  Then we do the IORs.  */
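  /* For example, (if_then_else (ne A 0) (ior B C) B), with A known to
     be 0 or -1, gives term1 = B, term2 = (and A C) and term3 = 0,
     so the result reduces to (ior B (and A C)).  */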
5226
5227  if (GET_CODE (dest) != PC
5228      && GET_CODE (src) == IF_THEN_ELSE
5229      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5230      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5231      && XEXP (XEXP (src, 0), 1) == const0_rtx
5232      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5233#ifdef HAVE_conditional_move
5234      && ! can_conditionally_move_p (GET_MODE (src))
5235#endif
5236      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5237			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5238	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5239      && ! side_effects_p (src))
5240    {
5241      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5242		      ? XEXP (src, 1) : XEXP (src, 2));
5243      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5244		   ? XEXP (src, 2) : XEXP (src, 1));
5245      rtx term1 = const0_rtx, term2, term3;
5246
5247      if (GET_CODE (true_rtx) == IOR
5248	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5249	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5250      else if (GET_CODE (true_rtx) == IOR
5251	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5252	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5253      else if (GET_CODE (false_rtx) == IOR
5254	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5255	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5256      else if (GET_CODE (false_rtx) == IOR
5257	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5258	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5259
5260      term2 = gen_binary (AND, GET_MODE (src),
5261			  XEXP (XEXP (src, 0), 0), true_rtx);
5262      term3 = gen_binary (AND, GET_MODE (src),
5263			  simplify_gen_unary (NOT, GET_MODE (src),
5264					      XEXP (XEXP (src, 0), 0),
5265					      GET_MODE (src)),
5266			  false_rtx);
5267
5268      SUBST (SET_SRC (x),
5269	     gen_binary (IOR, GET_MODE (src),
5270			 gen_binary (IOR, GET_MODE (src), term1, term2),
5271			 term3));
5272
5273      src = SET_SRC (x);
5274    }
5275
5276  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5277     whole thing fail.  */
5278  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5279    return src;
5280  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5281    return dest;
5282  else
5283    /* Convert this into a field assignment operation, if possible.  */
5284    return make_field_assignment (x);
5285}
5286
5287/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5288   result.  LAST is nonzero if this is the last retry.  */
5289
5290static rtx
5291simplify_logical (rtx x, int last)
5292{
5293  enum machine_mode mode = GET_MODE (x);
5294  rtx op0 = XEXP (x, 0);
5295  rtx op1 = XEXP (x, 1);
5296  rtx reversed;
5297
5298  switch (GET_CODE (x))
5299    {
5300    case AND:
5301      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5302	 insn (and may simplify more).  */
5303      if (GET_CODE (op0) == XOR
5304	  && rtx_equal_p (XEXP (op0, 0), op1)
5305	  && ! side_effects_p (op1))
5306	x = gen_binary (AND, mode,
5307			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5308			op1);
5309
5310      if (GET_CODE (op0) == XOR
5311	  && rtx_equal_p (XEXP (op0, 1), op1)
5312	  && ! side_effects_p (op1))
5313	x = gen_binary (AND, mode,
5314			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5315			op1);
5316
5317      /* Similarly for (~(A ^ B)) & A.  */
5318      if (GET_CODE (op0) == NOT
5319	  && GET_CODE (XEXP (op0, 0)) == XOR
5320	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5321	  && ! side_effects_p (op1))
5322	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5323
5324      if (GET_CODE (op0) == NOT
5325	  && GET_CODE (XEXP (op0, 0)) == XOR
5326	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5327	  && ! side_effects_p (op1))
5328	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5329
5330      /* We can call simplify_and_const_int only if we don't lose
5331	 any (sign) bits when converting INTVAL (op1) to
5332	 "unsigned HOST_WIDE_INT".  */
5333      if (GET_CODE (op1) == CONST_INT
5334	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5335	      || INTVAL (op1) > 0))
5336	{
5337	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5338
5339	  /* If we have (ior (and X C1) C2) and the next restart would be
5340	     the last, simplify this by making C1 as small as possible
5341	     and then exit.  */
5342	  if (last
5343	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5344	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
5345	      && GET_CODE (op1) == CONST_INT)
5346	    return gen_binary (IOR, mode,
5347			       gen_binary (AND, mode, XEXP (op0, 0),
5348					   GEN_INT (INTVAL (XEXP (op0, 1))
5349						    & ~INTVAL (op1))), op1);
5350
5351	  if (GET_CODE (x) != AND)
5352	    return x;
5353
5354	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5355	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
5356	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5357	}
5358
5359      /* Convert (A | B) & A to A.  */
5360      if (GET_CODE (op0) == IOR
5361	  && (rtx_equal_p (XEXP (op0, 0), op1)
5362	      || rtx_equal_p (XEXP (op0, 1), op1))
5363	  && ! side_effects_p (XEXP (op0, 0))
5364	  && ! side_effects_p (XEXP (op0, 1)))
5365	return op1;
5366
5367      /* In the following group of tests (and those in case IOR below),
5368	 we start with some combination of logical operations and apply
5369	 the distributive law followed by the inverse distributive law.
5370	 Most of the time, this results in no change.  However, if some of
5371	 the operands are the same or inverses of each other, simplifications
5372	 will result.
5373
5374	 For example, (and (ior A B) (not B)) can occur as the result of
5375	 expanding a bit field assignment.  When we apply the distributive
5376	 law to this, we get (ior (and A (not B)) (and B (not B))),
5377	 which then simplifies to (and A (not B)).
5378
5379	 If we have (and (ior A B) C), apply the distributive law and then
5380	 the inverse distributive law to see if things simplify.  */
5381
5382      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5383	{
5384	  x = apply_distributive_law
5385	    (gen_binary (GET_CODE (op0), mode,
5386			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5387			 gen_binary (AND, mode, XEXP (op0, 1),
5388				     copy_rtx (op1))));
5389	  if (GET_CODE (x) != AND)
5390	    return x;
5391	}
5392
5393      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5394	return apply_distributive_law
5395	  (gen_binary (GET_CODE (op1), mode,
5396		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5397		       gen_binary (AND, mode, XEXP (op1, 1),
5398				   copy_rtx (op0))));
5399
5400      /* Similarly, taking advantage of the fact that
5401	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
5402
5403      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5404	return apply_distributive_law
5405	  (gen_binary (XOR, mode,
5406		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5407		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5408				   XEXP (op1, 1))));
5409
5410      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5411	return apply_distributive_law
5412	  (gen_binary (XOR, mode,
5413		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5414		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5415      break;
5416
5417    case IOR:
5418      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5419      if (GET_CODE (op1) == CONST_INT
5420	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5421	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5422	return op1;
5423
5424      /* Convert (A & B) | A to A.  */
5425      if (GET_CODE (op0) == AND
5426	  && (rtx_equal_p (XEXP (op0, 0), op1)
5427	      || rtx_equal_p (XEXP (op0, 1), op1))
5428	  && ! side_effects_p (XEXP (op0, 0))
5429	  && ! side_effects_p (XEXP (op0, 1)))
5430	return op1;
5431
5432      /* If we have (ior (and A B) C), apply the distributive law and then
5433	 the inverse distributive law to see if things simplify.  */
5434
5435      if (GET_CODE (op0) == AND)
5436	{
5437	  x = apply_distributive_law
5438	    (gen_binary (AND, mode,
5439			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5440			 gen_binary (IOR, mode, XEXP (op0, 1),
5441				     copy_rtx (op1))));
5442
5443	  if (GET_CODE (x) != IOR)
5444	    return x;
5445	}
5446
5447      if (GET_CODE (op1) == AND)
5448	{
5449	  x = apply_distributive_law
5450	    (gen_binary (AND, mode,
5451			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5452			 gen_binary (IOR, mode, XEXP (op1, 1),
5453				     copy_rtx (op0))));
5454
5455	  if (GET_CODE (x) != IOR)
5456	    return x;
5457	}
5458
5459      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5460	 mode size to (rotate A CX).  */
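      /* For example, in SImode, (ior (ashift A 3) (lshiftrt A 29))
	 becomes (rotate A 3).  */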
5461
5462      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5463	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5464	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5465	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5466	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5467	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5468	      == GET_MODE_BITSIZE (mode)))
5469	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5470			       (GET_CODE (op0) == ASHIFT
5471				? XEXP (op0, 1) : XEXP (op1, 1)));
5472
5473      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5474	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5475	 does not affect any of the bits in OP1, it can really be done
5476	 as a PLUS and we can associate.  We do this by seeing if OP1
5477	 can be safely shifted left C bits.  */
5478      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5479	  && GET_CODE (XEXP (op0, 0)) == PLUS
5480	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5481	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5482	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5483	{
5484	  int count = INTVAL (XEXP (op0, 1));
5485	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5486
5487	  if (mask >> count == INTVAL (op1)
5488	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5489	    {
5490	      SUBST (XEXP (XEXP (op0, 0), 1),
5491		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5492	      return op0;
5493	    }
5494	}
5495      break;
5496
5497    case XOR:
5498      /* If we are XORing two things that have no bits in common,
5499	 convert them into an IOR.  This helps to detect a rotate encoded
5500	 as a pair of shifts and may enable other simplifications.  */
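      /* For example, in SImode, (xor (ashift A 3) (lshiftrt A 29)) has
	 no nonzero bits in common, so it becomes the corresponding IOR,
	 which the rotate detection above can then recognize.  */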
5501
5502      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5503	  && (nonzero_bits (op0, mode)
5504	      & nonzero_bits (op1, mode)) == 0)
5505	return (gen_binary (IOR, mode, op0, op1));
5506
5507      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5508	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5509	 (NOT y).  */
5510      {
5511	int num_negated = 0;
5512
5513	if (GET_CODE (op0) == NOT)
5514	  num_negated++, op0 = XEXP (op0, 0);
5515	if (GET_CODE (op1) == NOT)
5516	  num_negated++, op1 = XEXP (op1, 0);
5517
5518	if (num_negated == 2)
5519	  {
5520	    SUBST (XEXP (x, 0), op0);
5521	    SUBST (XEXP (x, 1), op1);
5522	  }
5523	else if (num_negated == 1)
5524	  return
5525	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5526				mode);
5527      }
5528
5529      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5530	 correspond to a machine insn or result in further simplifications
5531	 if B is a constant.  */
5532
5533      if (GET_CODE (op0) == AND
5534	  && rtx_equal_p (XEXP (op0, 1), op1)
5535	  && ! side_effects_p (op1))
5536	return gen_binary (AND, mode,
5537			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5538			   op1);
5539
5540      else if (GET_CODE (op0) == AND
5541	       && rtx_equal_p (XEXP (op0, 0), op1)
5542	       && ! side_effects_p (op1))
5543	return gen_binary (AND, mode,
5544			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5545			   op1);
5546
5547      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5548	 comparison if STORE_FLAG_VALUE is 1.  */
5549      if (STORE_FLAG_VALUE == 1
5550	  && op1 == const1_rtx
5551	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5552	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5553					      XEXP (op0, 1))))
5554	return reversed;
5555
5556      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5557	 is (lt foo (const_int 0)), so we can perform the above
5558	 simplification if STORE_FLAG_VALUE is 1.  */
5559
5560      if (STORE_FLAG_VALUE == 1
5561	  && op1 == const1_rtx
5562	  && GET_CODE (op0) == LSHIFTRT
5563	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5564	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5565	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5566
5567      /* (xor (comparison foo bar) (const_int sign-bit)) can likewise
5568	 become the reversed comparison when STORE_FLAG_VALUE is the sign bit.  */
5569      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5570	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5571	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5572	  && op1 == const_true_rtx
5573	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5574	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5575					      XEXP (op0, 1))))
5576	return reversed;
5577
5578      break;
5579
5580    default:
5581      abort ();
5582    }
5583
5584  return x;
5585}
5586
5587/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5588   operations" because they can be replaced with two more basic operations.
5589   ZERO_EXTEND is also considered "compound" because it can be replaced with
5590   an AND operation, which is simpler even though it is only one operation.
5591
5592   The function expand_compound_operation is called with an rtx expression
5593   and will convert it to the appropriate shifts and AND operations,
5594   simplifying at each stage.
5595
5596   The function make_compound_operation is called to convert an expression
5597   consisting of shifts and ANDs into the equivalent compound expression.
5598   It is the inverse of this function, loosely speaking.  */
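/* For example, (sign_extract:SI X 8 0) expands to
   (ashiftrt:SI (ashift:SI X 24) 24), and the corresponding
   (zero_extract:SI X 8 0) yields a logical shift pair that normally
   folds to (and:SI X 255).  */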
5599
5600static rtx
5601expand_compound_operation (rtx x)
5602{
5603  unsigned HOST_WIDE_INT pos = 0, len;
5604  int unsignedp = 0;
5605  unsigned int modewidth;
5606  rtx tem;
5607
5608  switch (GET_CODE (x))
5609    {
5610    case ZERO_EXTEND:
5611      unsignedp = 1;
5612    case SIGN_EXTEND:
5613      /* We can't necessarily use a const_int for a multiword mode;
5614	 it depends on implicitly extending the value.
5615	 Since we don't know the right way to extend it,
5616	 we can't tell whether the implicit way is right.
5617
5618	 Even for a mode that is no wider than a const_int,
5619	 we can't win, because we need to sign extend one of its bits through
5620	 the rest of it, and we don't know which bit.  */
5621      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5622	return x;
5623
5624      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5625	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5626	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5627	 reloaded.  If not for that, MEMs would very rarely be safe.
5628
5629	 Reject MODEs bigger than a word, because we might not be able
5630	 to reference a two-register group starting with an arbitrary register
5631	 (and currently gen_lowpart might crash for a SUBREG).  */
5632
5633      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5634	return x;
5635
5636      /* Reject MODEs that aren't scalar integers because turning vector
5637	 or complex modes into shifts causes problems.  */
5638
5639      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5640	return x;
5641
5642      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5643      /* If the inner object has VOIDmode (the only way this can happen
5644	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5645	 know how much masking to do.  */
5646      if (len == 0)
5647	return x;
5648
5649      break;
5650
5651    case ZERO_EXTRACT:
5652      unsignedp = 1;
5653    case SIGN_EXTRACT:
5654      /* If the operand is a CLOBBER, just return it.  */
5655      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5656	return XEXP (x, 0);
5657
5658      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5659	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5660	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5661	return x;
5662
5663      /* Reject MODEs that aren't scalar integers because turning vector
5664	 or complex modes into shifts causes problems.  */
5665
5666      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5667	return x;
5668
5669      len = INTVAL (XEXP (x, 1));
5670      pos = INTVAL (XEXP (x, 2));
5671
5672      /* If this goes outside the object being extracted, replace the object
5673	 with a (use (mem ...)) construct that only combine understands
5674	 and is used only for this purpose.  */
5675      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5676	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5677
5678      if (BITS_BIG_ENDIAN)
5679	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5680
5681      break;
5682
5683    default:
5684      return x;
5685    }
5686  /* Convert sign extension to zero extension, if we know that the high
5687     bit is not set, as this is easier to optimize.  It will be converted
5688     back to cheaper alternative in make_extraction.  */
5689  if (GET_CODE (x) == SIGN_EXTEND
5690      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5691	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5692		& ~(((unsigned HOST_WIDE_INT)
5693		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5694		     >> 1))
5695	       == 0)))
5696    {
5697      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5698      rtx temp2 = expand_compound_operation (temp);
5699
5700      /* Make sure this is a profitable operation.  */
5701      if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
5702       return temp2;
5703      else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
5704       return temp;
5705      else
5706       return x;
5707    }
5708
5709  /* We can optimize some special cases of ZERO_EXTEND.  */
5710  if (GET_CODE (x) == ZERO_EXTEND)
5711    {
5712      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5713         know that foo:DI has no nonzero bits outside the mode being
5714         truncated to.  */
5715      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5716	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5717	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5718	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5719	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5720	return XEXP (XEXP (x, 0), 0);
5721
5722      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5723      if (GET_CODE (XEXP (x, 0)) == SUBREG
5724	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5725	  && subreg_lowpart_p (XEXP (x, 0))
5726	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5727	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5728	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5729	return SUBREG_REG (XEXP (x, 0));
5730
5731      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5732         is a comparison and STORE_FLAG_VALUE permits.  This is like
5733         the first case, but it works even when GET_MODE (x) is larger
5734         than HOST_WIDE_INT.  */
5735      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5736	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5737	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5738	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5739	      <= HOST_BITS_PER_WIDE_INT)
5740	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5741	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5742	return XEXP (XEXP (x, 0), 0);
5743
5744      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5745      if (GET_CODE (XEXP (x, 0)) == SUBREG
5746	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5747	  && subreg_lowpart_p (XEXP (x, 0))
5748	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5749	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5750	      <= HOST_BITS_PER_WIDE_INT)
5751	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5752	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5753	return SUBREG_REG (XEXP (x, 0));
5754
5755    }
5756
5757  /* If we reach here, we want to return a pair of shifts.  The inner
5758     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5759     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5760     logical depending on the value of UNSIGNEDP.
5761
5762     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5763     converted into an AND of a shift.
5764
5765     We must check for the case where the left shift would have a negative
5766     count.  This can happen in a case like (x >> 31) & 255 on machines
5767     that can't shift by a constant.  On those machines, we would first
5768     combine the shift with the AND to produce a variable-position
5769     extraction.  Then the constant of 31 would be substituted in to produce
5770     such a position.  */
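  /* For example, for (sign_extract:SI X 8 4) we have BITSIZE == 32,
     POS == 4 and LEN == 8, so the result is
     (ashiftrt:SI (ashift:SI X 20) 24).  */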
5771
5772  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5773  if (modewidth + len >= pos)
5774    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5775				GET_MODE (x),
5776				simplify_shift_const (NULL_RTX, ASHIFT,
5777						      GET_MODE (x),
5778						      XEXP (x, 0),
5779						      modewidth - pos - len),
5780				modewidth - len);
5781
5782  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5783    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5784				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5785							GET_MODE (x),
5786							XEXP (x, 0), pos),
5787				  ((HOST_WIDE_INT) 1 << len) - 1);
5788  else
5789    /* Any other cases we can't handle.  */
5790    return x;
5791
5792  /* If we couldn't do this for some reason, return the original
5793     expression.  */
5794  if (GET_CODE (tem) == CLOBBER)
5795    return x;
5796
5797  return tem;
5798}
5799
5800/* X is a SET which contains an assignment of one object into
5801   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5802   or certain SUBREGS). If possible, convert it into a series of
5803   or certain SUBREGs).  If possible, convert it into a series of
5804
5805   We half-heartedly support variable positions, but do not at all
5806   support variable lengths.  */
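/* For example, with BITS_BIG_ENDIAN == 0, (set (zero_extract:SI R 8 4) S)
   becomes, roughly,
   (set R (ior (and (not (ashift 255 4)) R)
	       (ashift (and S 255) 4))),
   masking the old field out of R and shifting the new value in.  */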
5807
5808static rtx
5809expand_field_assignment (rtx x)
5810{
5811  rtx inner;
5812  rtx pos;			/* Always counts from low bit.  */
5813  int len;
5814  rtx mask;
5815  enum machine_mode compute_mode;
5816
5817  /* Loop until we find something we can't simplify.  */
5818  while (1)
5819    {
5820      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5821	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5822	{
5823	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5824	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5825	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5826	}
5827      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5828	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5829	{
5830	  inner = XEXP (SET_DEST (x), 0);
5831	  len = INTVAL (XEXP (SET_DEST (x), 1));
5832	  pos = XEXP (SET_DEST (x), 2);
5833
5834	  /* If the position is constant and spans the width of INNER,
5835	     surround INNER with a USE to indicate this.  */
5836	  if (GET_CODE (pos) == CONST_INT
5837	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5838	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5839
5840	  if (BITS_BIG_ENDIAN)
5841	    {
5842	      if (GET_CODE (pos) == CONST_INT)
5843		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5844			       - INTVAL (pos));
5845	      else if (GET_CODE (pos) == MINUS
5846		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5847		       && (INTVAL (XEXP (pos, 1))
5848			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5849		/* If position is ADJUST - X, new position is X.  */
5850		pos = XEXP (pos, 0);
5851	      else
5852		pos = gen_binary (MINUS, GET_MODE (pos),
5853				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5854					   - len),
5855				  pos);
5856	    }
5857	}
5858
5859      /* A SUBREG between two modes that occupy the same numbers of words
5860	 can be done by moving the SUBREG to the source.  */
5861      else if (GET_CODE (SET_DEST (x)) == SUBREG
5862	       /* We need SUBREGs to compute nonzero_bits properly.  */
5863	       && nonzero_sign_valid
5864	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5865		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5866		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5867			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5868	{
5869	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5870			   gen_lowpart_for_combine
5871			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
5872			    SET_SRC (x)));
5873	  continue;
5874	}
5875      else
5876	break;
5877
5878      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5879	inner = SUBREG_REG (inner);
5880
5881      compute_mode = GET_MODE (inner);
5882
5883      /* Don't attempt bitwise arithmetic on modes that are not scalar integers.  */
5884      if (! SCALAR_INT_MODE_P (compute_mode))
5885	{
5886	  enum machine_mode imode;
5887
5888	  /* Don't do anything for vector or complex integral types.  */
5889	  if (! FLOAT_MODE_P (compute_mode))
5890	    break;
5891
5892	  /* Try to find an integral mode to pun with.  */
5893	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5894	  if (imode == BLKmode)
5895	    break;
5896
5897	  compute_mode = imode;
5898	  inner = gen_lowpart_for_combine (imode, inner);
5899	}
5900
5901      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5902      if (len < HOST_BITS_PER_WIDE_INT)
5903	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5904      else
5905	break;
5906
5907      /* Now compute the equivalent expression.  Make a copy of INNER
5908	 for the SET_DEST in case it is a MEM into which we will substitute;
5909	 we don't want shared RTL in that case.  */
5910      x = gen_rtx_SET
5911	(VOIDmode, copy_rtx (inner),
5912	 gen_binary (IOR, compute_mode,
5913		     gen_binary (AND, compute_mode,
5914				 simplify_gen_unary (NOT, compute_mode,
5915						     gen_binary (ASHIFT,
5916								 compute_mode,
5917								 mask, pos),
5918						     compute_mode),
5919				 inner),
5920		     gen_binary (ASHIFT, compute_mode,
5921				 gen_binary (AND, compute_mode,
5922					     gen_lowpart_for_combine
5923					     (compute_mode, SET_SRC (x)),
5924					     mask),
5925				 pos)));
5926    }
5927
5928  return x;
5929}
5930
5931/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5932   it is an RTX that represents a variable starting position; otherwise,
5933   POS is the (constant) starting bit position (counted from the LSB).
5934
5935   INNER may be a USE.  This will occur when we started with a bitfield
5936   that went outside the boundary of the object in memory, which is
5937   allowed on most machines.  To isolate this case, we produce a USE
5938   whose mode is wide enough and surround the MEM with it.  The only
5939   code that understands the USE is this routine.  If it is not removed,
5940   it will cause the resulting insn not to match.
5941
5942   UNSIGNEDP is nonzero for an unsigned reference and zero for a
5943   signed reference.
5944
5945   IN_DEST is nonzero if this is a reference in the destination of a
5946   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
5947   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5948   be used.
5949
5950   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
5951   ZERO_EXTRACT should be built even for bits starting at bit 0.
5952
5953   MODE is the desired mode of the result (if IN_DEST == 0).
5954
5955   The result is an RTX for the extraction or NULL_RTX if the target
5956   can't handle it.  */
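
/* Editorial example with hypothetical operands: a signed reference to
   LEN = 8 bits at POS = 8 of (reg:SI 60), with IN_DEST == 0 and
   IN_COMPARE == 0, would typically come back as

       (sign_extract:SI (reg:SI 60) (const_int 8) (const_int 8))

   whereas the analogous unsigned reference starting at bit 0 makes
   this routine return 0, since (and:SI (reg:SI 60) (const_int 255))
   already expresses it and needs no extraction.  */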
5957
5958static rtx
5959make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
5960		 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
5961		 int in_dest, int in_compare)
5962{
5963  /* This mode describes the size of the storage area
5964     to fetch the overall value from.  Within that, we
5965     ignore the POS lowest bits, etc.  */
5966  enum machine_mode is_mode = GET_MODE (inner);
5967  enum machine_mode inner_mode;
5968  enum machine_mode wanted_inner_mode = byte_mode;
5969  enum machine_mode wanted_inner_reg_mode = word_mode;
5970  enum machine_mode pos_mode = word_mode;
5971  enum machine_mode extraction_mode = word_mode;
5972  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5973  int spans_byte = 0;
5974  rtx new = 0;
5975  rtx orig_pos_rtx = pos_rtx;
5976  HOST_WIDE_INT orig_pos;
5977
5978  /* Get some information about INNER and get the innermost object.  */
5979  if (GET_CODE (inner) == USE)
5980    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5981    /* We don't need to adjust the position because we set up the USE
5982       to pretend that it was a full-word object.  */
5983    spans_byte = 1, inner = XEXP (inner, 0);
5984  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5985    {
5986      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5987	 consider just the QI as the memory to extract from.
5988	 The subreg adds or removes high bits; its mode is
5989	 irrelevant to the meaning of this extraction,
5990	 since POS and LEN count from the lsb.  */
5991      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5992	is_mode = GET_MODE (SUBREG_REG (inner));
5993      inner = SUBREG_REG (inner);
5994    }
5995  else if (GET_CODE (inner) == ASHIFT
5996	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
5997	   && pos_rtx == 0 && pos == 0
5998	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
5999    {
6000      /* We're extracting the least significant bits of an rtx
6001	 (ashift X (const_int C)), where LEN > C.  Extract the
6002	 least significant (LEN - C) bits of X, giving an rtx
6003	 whose mode is MODE, then shift it left C times.  */
6004      new = make_extraction (mode, XEXP (inner, 0),
6005			     0, 0, len - INTVAL (XEXP (inner, 1)),
6006			     unsignedp, in_dest, in_compare);
6007      if (new != 0)
6008	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6009    }
6010
6011  inner_mode = GET_MODE (inner);
6012
6013  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6014    pos = INTVAL (pos_rtx), pos_rtx = 0;
6015
6016  /* See if this can be done without an extraction.  We never can if the
6017     width of the field is not the same as that of some integer mode. For
6018     registers, we can only avoid the extraction if the position is at the
6019     low-order bit and this is either not in the destination or we have the
6020     appropriate STRICT_LOW_PART operation available.
6021
6022     For MEM, we can avoid an extract if the field starts on an appropriate
6023     boundary and we can change the mode of the memory reference.  However,
6024     we cannot directly access the MEM if we have a USE and the underlying
6025     MEM is not TMODE.  This combination means that MEM was being used in a
6026     context where bits outside its mode were being referenced; that is only
6027     valid in bit-field insns.  */
6028
6029  if (tmode != BLKmode
6030      && ! (spans_byte && inner_mode != tmode)
6031      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6032	   && GET_CODE (inner) != MEM
6033	   && (! in_dest
6034	       || (GET_CODE (inner) == REG
6035		   && have_insn_for (STRICT_LOW_PART, tmode))))
6036	  || (GET_CODE (inner) == MEM && pos_rtx == 0
6037	      && (pos
6038		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6039		     : BITS_PER_UNIT)) == 0
6040	      /* We can't do this if we are widening INNER_MODE (it
6041		 may not be aligned, for one thing).  */
6042	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6043	      && (inner_mode == tmode
6044		  || (! mode_dependent_address_p (XEXP (inner, 0))
6045		      && ! MEM_VOLATILE_P (inner))))))
6046    {
6047      /* If INNER is a MEM, make a new MEM that encompasses just the desired
6048	 field.  If the original and current mode are the same, we need not
6049	 adjust the offset.  Otherwise, we do if bytes big endian.
6050	 adjust the offset.  Otherwise, we must do so if bytes are big-endian.
6051	 If INNER is not a MEM, get a piece consisting of just the field
6052	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6053
6054      if (GET_CODE (inner) == MEM)
6055	{
6056	  HOST_WIDE_INT offset;
6057
6058	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6059	  if (BYTES_BIG_ENDIAN)
6060	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6061	  else
6062	    offset = pos / BITS_PER_UNIT;
6063
6064	  new = adjust_address_nv (inner, tmode, offset);
6065	}
6066      else if (GET_CODE (inner) == REG)
6067	{
6068	  if (tmode != inner_mode)
6069	    {
6070	      /* We can't call gen_lowpart_for_combine in a DEST since we
6071		 always want a SUBREG (see below) and it would sometimes
6072		 return a new hard register.  */
6073	      if (pos || in_dest)
6074		{
6075		  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6076
6077		  if (WORDS_BIG_ENDIAN
6078		      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6079		    final_word = ((GET_MODE_SIZE (inner_mode)
6080				   - GET_MODE_SIZE (tmode))
6081				  / UNITS_PER_WORD) - final_word;
6082
6083		  final_word *= UNITS_PER_WORD;
6084		  if (BYTES_BIG_ENDIAN
6085		      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6086		    final_word += (GET_MODE_SIZE (inner_mode)
6087				   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6088
6089		  /* Avoid creating invalid subregs, for example when
6090		     simplifying (x>>32)&255.  */
6091		  if (final_word >= GET_MODE_SIZE (inner_mode))
6092		    return NULL_RTX;
6093
6094		  new = gen_rtx_SUBREG (tmode, inner, final_word);
6095		}
6096	      else
6097		new = gen_lowpart_for_combine (tmode, inner);
6098	    }
6099	  else
6100	    new = inner;
6101	}
6102      else
6103	new = force_to_mode (inner, tmode,
6104			     len >= HOST_BITS_PER_WIDE_INT
6105			     ? ~(unsigned HOST_WIDE_INT) 0
6106			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6107			     NULL_RTX, 0);
6108
6109      /* If this extraction is going into the destination of a SET,
6110	 make a STRICT_LOW_PART unless we made a MEM.  */
6111
6112      if (in_dest)
6113	return (GET_CODE (new) == MEM ? new
6114		: (GET_CODE (new) != SUBREG
6115		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6116		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6117
6118      if (mode == tmode)
6119	return new;
6120
6121      if (GET_CODE (new) == CONST_INT)
6122	return gen_int_mode (INTVAL (new), mode);
6123
6124      /* If we know that no extraneous bits are set, and that the high
6125	 bit is not set, convert the extraction to the cheaper of
6126	 sign and zero extension, which are equivalent in these cases.  */
6127      if (flag_expensive_optimizations
6128	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6129	      && ((nonzero_bits (new, tmode)
6130		   & ~(((unsigned HOST_WIDE_INT)
6131			GET_MODE_MASK (tmode))
6132		       >> 1))
6133		  == 0)))
6134	{
6135	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6136	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6137
6138	  /* Prefer ZERO_EXTENSION, since it gives more information to
6139	     backends.  */
6140	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6141	    return temp;
6142	  return temp1;
6143	}
6144
6145      /* Otherwise, sign- or zero-extend unless we already are in the
6146	 proper mode.  */
6147
6148      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6149			     mode, new));
6150    }
6151
6152  /* Unless this is a COMPARE or we have a funny memory reference,
6153     don't do anything with zero-extending field extracts starting at
6154     the low-order bit since they are simple AND operations.  */
6155  if (pos_rtx == 0 && pos == 0 && ! in_dest
6156      && ! in_compare && ! spans_byte && unsignedp)
6157    return 0;
6158
6159  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6160     we would be spanning bytes or if the position is not a constant and the
6161     length is not 1.  In all other cases, we would only be going
6162     outside our object when an original shift would have been
6163     undefined.  */
6164  if (! spans_byte && GET_CODE (inner) == MEM
6165      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6166	  || (pos_rtx != 0 && len != 1)))
6167    return 0;
6168
6169  /* Get the mode to use should INNER not be a MEM, the mode for the position,
6170     and the mode for the result.  */
6171  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6172    {
6173      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6174      pos_mode = mode_for_extraction (EP_insv, 2);
6175      extraction_mode = mode_for_extraction (EP_insv, 3);
6176    }
6177
6178  if (! in_dest && unsignedp
6179      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6180    {
6181      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6182      pos_mode = mode_for_extraction (EP_extzv, 3);
6183      extraction_mode = mode_for_extraction (EP_extzv, 0);
6184    }
6185
6186  if (! in_dest && ! unsignedp
6187      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6188    {
6189      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6190      pos_mode = mode_for_extraction (EP_extv, 3);
6191      extraction_mode = mode_for_extraction (EP_extv, 0);
6192    }
6193
6194  /* Never narrow an object, since that might not be safe.  */
6195
6196  if (mode != VOIDmode
6197      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6198    extraction_mode = mode;
6199
6200  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6201      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6202    pos_mode = GET_MODE (pos_rtx);
6203
6204  /* If this is not from memory, the desired mode is WANTED_INNER_REG_MODE;
6205     if we have to change the mode of memory and cannot, the desired mode is
6206     EXTRACTION_MODE.  */
6207  if (GET_CODE (inner) != MEM)
6208    wanted_inner_mode = wanted_inner_reg_mode;
6209  else if (inner_mode != wanted_inner_mode
6210	   && (mode_dependent_address_p (XEXP (inner, 0))
6211	       || MEM_VOLATILE_P (inner)))
6212    wanted_inner_mode = extraction_mode;
6213
6214  orig_pos = pos;
6215
6216  if (BITS_BIG_ENDIAN)
6217    {
6218      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6219	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6220	 position.  Otherwise, build subtraction.
6221	 Note that POS is relative to the mode of the original argument.
6222	 If it's a MEM we need to recompute POS relative to that.
6223	 However, if we're extracting from (or inserting into) a register,
6224	 we want to recompute POS relative to wanted_inner_mode.  */
6225      int width = (GET_CODE (inner) == MEM
6226		   ? GET_MODE_BITSIZE (is_mode)
6227		   : GET_MODE_BITSIZE (wanted_inner_mode));
6228
6229      if (pos_rtx == 0)
6230	pos = width - len - pos;
6231      else
6232	pos_rtx
6233	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6234      /* POS may be less than 0 now, but we check for that below.
6235	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
6236    }
6237
6238  /* If INNER has a wider mode, make it smaller.  If this is a constant
6239     extract, try to adjust the byte to point to the byte containing
6240     the value.  */
6241  if (wanted_inner_mode != VOIDmode
6242      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6243      && ((GET_CODE (inner) == MEM
6244	   && (inner_mode == wanted_inner_mode
6245	       || (! mode_dependent_address_p (XEXP (inner, 0))
6246		   && ! MEM_VOLATILE_P (inner))))))
6247    {
6248      int offset = 0;
6249
6250      /* The computations below will be correct if the machine is big
6251	 endian in both bits and bytes or little endian in bits and bytes.
6252	 If it is mixed, we must adjust.  */
6253
6254      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6255	 adjust OFFSET to compensate.  */
6256      if (BYTES_BIG_ENDIAN
6257	  && ! spans_byte
6258	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6259	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6260
6261      /* If this is a constant position, we can move to the desired byte.  */
6262      if (pos_rtx == 0)
6263	{
6264	  offset += pos / BITS_PER_UNIT;
6265	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6266	}
6267
6268      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6269	  && ! spans_byte
6270	  && is_mode != wanted_inner_mode)
6271	offset = (GET_MODE_SIZE (is_mode)
6272		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6273
6274      if (offset != 0 || inner_mode != wanted_inner_mode)
6275	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6276    }
6277
6278  /* If INNER is not memory, we can always get it into the proper mode.  If we
6279     are changing its mode, POS must be a constant and smaller than the size
6280     of the new mode.  */
6281  else if (GET_CODE (inner) != MEM)
6282    {
6283      if (GET_MODE (inner) != wanted_inner_mode
6284	  && (pos_rtx != 0
6285	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6286	return 0;
6287
6288      inner = force_to_mode (inner, wanted_inner_mode,
6289			     pos_rtx
6290			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6291			     ? ~(unsigned HOST_WIDE_INT) 0
6292			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6293				<< orig_pos),
6294			     NULL_RTX, 0);
6295    }
6296
6297  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6298     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6299  if (pos_rtx != 0
6300      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6301    {
6302      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6303
6304      /* If we know that no extraneous bits are set, and that the high
6305	 bit is not set, convert the extraction to the cheaper of
6306	 SIGN_EXTENSION and ZERO_EXTENSION, which are equivalent in these
6307	 cases.  */
6308      if (flag_expensive_optimizations
6309	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6310	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6311		   & ~(((unsigned HOST_WIDE_INT)
6312			GET_MODE_MASK (GET_MODE (pos_rtx)))
6313		       >> 1))
6314		  == 0)))
6315	{
6316	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6317
6318	  /* Prefer ZERO_EXTENSION, since it gives more information to
6319	     backends.  */
6320	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6321	    temp = temp1;
6322	}
6323      pos_rtx = temp;
6324    }
6325  else if (pos_rtx != 0
6326	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6327    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6328
6329  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6330     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6331     be a CONST_INT.  */
6332  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6333    pos_rtx = orig_pos_rtx;
6334
6335  else if (pos_rtx == 0)
6336    pos_rtx = GEN_INT (pos);
6337
6338  /* Make the required operation.  See if we can use an existing rtx.  */
6339  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6340			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6341  if (! in_dest)
6342    new = gen_lowpart_for_combine (mode, new);
6343
6344  return new;
6345}
6346
6347/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6348   with any other operations in X.  Return X without that shift if so.  */
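
/* Editorial example: with COUNT = 3 and a hypothetical
   X = (plus:SI (ashift:SI (reg:SI 60) (const_int 3)) (const_int 8)),
   the low three bits of the constant are zero, so the result is
   (plus:SI (reg:SI 60) (const_int 1)), i.e. X with the shift removed.  */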
6349
6350static rtx
6351extract_left_shift (rtx x, int count)
6352{
6353  enum rtx_code code = GET_CODE (x);
6354  enum machine_mode mode = GET_MODE (x);
6355  rtx tem;
6356
6357  switch (code)
6358    {
6359    case ASHIFT:
6360      /* This is the shift itself.  If it is wide enough, we will return
6361	 either the value being shifted if the shift count is equal to
6362	 COUNT or a shift for the difference.  */
6363      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6364	  && INTVAL (XEXP (x, 1)) >= count)
6365	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6366				     INTVAL (XEXP (x, 1)) - count);
6367      break;
6368
6369    case NEG:  case NOT:
6370      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6371	return simplify_gen_unary (code, mode, tem, mode);
6372
6373      break;
6374
6375    case PLUS:  case IOR:  case XOR:  case AND:
6376      /* If we can safely shift this constant and we find the inner shift,
6377	 make a new operation.  */
6378      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6379	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6380	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6381	return gen_binary (code, mode, tem,
6382			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6383
6384      break;
6385
6386    default:
6387      break;
6388    }
6389
6390  return 0;
6391}
6392
6393/* Look at the expression rooted at X.  Look for expressions
6394   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6395   Form these expressions.
6396
6397   Return the new rtx, usually just X.
6398
6399   Also, for machines like the VAX that don't have logical shift insns,
6400   try to convert logical to arithmetic shift operations in cases where
6401   they are equivalent.  This undoes the canonicalizations to logical
6402   shifts done elsewhere.
6403
6404   We try, as much as possible, to re-use rtl expressions to save memory.
6405
6406   IN_CODE says what kind of expression we are processing.  Normally, it is
6407   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
6408   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
6409   or a COMPARE against zero, it is COMPARE.  */
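
/* Two editorial examples with hypothetical operands: inside an address
   (IN_CODE == MEM), (ashift:SI (reg:SI 60) (const_int 2)) becomes
   (mult:SI (reg:SI 60) (const_int 4)); in a SET, on a typical 32-bit
   target, (and:SI (lshiftrt:SI (reg:SI 60) (const_int 8)) (const_int 255))
   is rewritten as the extraction
   (zero_extract:SI (reg:SI 60) (const_int 8) (const_int 8)).  */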
6410
6411static rtx
6412make_compound_operation (rtx x, enum rtx_code in_code)
6413{
6414  enum rtx_code code = GET_CODE (x);
6415  enum machine_mode mode = GET_MODE (x);
6416  int mode_width = GET_MODE_BITSIZE (mode);
6417  rtx rhs, lhs;
6418  enum rtx_code next_code;
6419  int i;
6420  rtx new = 0;
6421  rtx tem;
6422  const char *fmt;
6423
6424  /* Select the code to be used in recursive calls.  Once we are inside an
6425     address, we stay there.  If we have a comparison, set to COMPARE,
6426     but once inside, go back to our default of SET.  */
6427
6428  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6429	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6430		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6431	       : in_code == COMPARE ? SET : in_code);
6432
6433  /* Process depending on the code of this operation.  If NEW is set
6434     nonzero, it will be returned.  */
6435
6436  switch (code)
6437    {
6438    case ASHIFT:
6439      /* Convert shifts by constants into multiplications if inside
6440	 an address.  */
6441      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6442	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6443	  && INTVAL (XEXP (x, 1)) >= 0)
6444	{
6445	  new = make_compound_operation (XEXP (x, 0), next_code);
6446	  new = gen_rtx_MULT (mode, new,
6447			      GEN_INT ((HOST_WIDE_INT) 1
6448				       << INTVAL (XEXP (x, 1))));
6449	}
6450      break;
6451
6452    case AND:
6453      /* If the second operand is not a constant, we can't do anything
6454	 with it.  */
6455      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6456	break;
6457
6458      /* If the constant is a power of two minus one and the first operand
6459	 is a logical right shift, make an extraction.  */
6460      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6461	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6462	{
6463	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6464	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6465				 0, in_code == COMPARE);
6466	}
6467
6468      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6469      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6470	       && subreg_lowpart_p (XEXP (x, 0))
6471	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6472	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6473	{
6474	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6475					 next_code);
6476	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6477				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6478				 0, in_code == COMPARE);
6479	}
6480      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6481      else if ((GET_CODE (XEXP (x, 0)) == XOR
6482		|| GET_CODE (XEXP (x, 0)) == IOR)
6483	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6484	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6485	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6486	{
6487	  /* Apply the distributive law, and then try to make extractions.  */
6488	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6489				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6490					     XEXP (x, 1)),
6491				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6492					     XEXP (x, 1)));
6493	  new = make_compound_operation (new, in_code);
6494	}
6495
6496      /* If we have (and (rotate X C) M) and C is at least the number
6497	 of bits in M, this is an extraction.  */
6498
6499      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6500	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6501	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6502	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6503	{
6504	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6505	  new = make_extraction (mode, new,
6506				 (GET_MODE_BITSIZE (mode)
6507				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6508				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6509	}
6510
6511      /* On machines without logical shifts, if the operand of the AND is
6512	 a logical shift and our mask turns off all the propagated sign
6513	 bits, we can replace the logical shift with an arithmetic shift.  */
6514      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6515	       && !have_insn_for (LSHIFTRT, mode)
6516	       && have_insn_for (ASHIFTRT, mode)
6517	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6518	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6519	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6520	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6521	{
6522	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6523
6524	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6525	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6526	    SUBST (XEXP (x, 0),
6527		   gen_rtx_ASHIFTRT (mode,
6528				     make_compound_operation
6529				     (XEXP (XEXP (x, 0), 0), next_code),
6530				     XEXP (XEXP (x, 0), 1)));
6531	}
6532
6533      /* If the constant is one less than a power of two, this might be
6534	 representable by an extraction even if no shift is present.
6535	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6536	 we are in a COMPARE.  */
6537      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6538	new = make_extraction (mode,
6539			       make_compound_operation (XEXP (x, 0),
6540							next_code),
6541			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6542
6543      /* If we are in a comparison and this is an AND with a power of two,
6544	 convert this into the appropriate bit extract.  */
6545      else if (in_code == COMPARE
6546	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6547	new = make_extraction (mode,
6548			       make_compound_operation (XEXP (x, 0),
6549							next_code),
6550			       i, NULL_RTX, 1, 1, 0, 1);
6551
6552      break;
6553
6554    case LSHIFTRT:
6555      /* If the sign bit is known to be zero, replace this with an
6556	 arithmetic shift.  */
6557      if (have_insn_for (ASHIFTRT, mode)
6558	  && ! have_insn_for (LSHIFTRT, mode)
6559	  && mode_width <= HOST_BITS_PER_WIDE_INT
6560	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6561	{
6562	  new = gen_rtx_ASHIFTRT (mode,
6563				  make_compound_operation (XEXP (x, 0),
6564							   next_code),
6565				  XEXP (x, 1));
6566	  break;
6567	}
6568
6569      /* ... fall through ...  */
6570
6571    case ASHIFTRT:
6572      lhs = XEXP (x, 0);
6573      rhs = XEXP (x, 1);
6574
6575      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6576	 this is a SIGN_EXTRACT, or a ZERO_EXTRACT coming from LSHIFTRT.  */
6577      if (GET_CODE (rhs) == CONST_INT
6578	  && GET_CODE (lhs) == ASHIFT
6579	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6580	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6581	{
6582	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6583	  new = make_extraction (mode, new,
6584				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6585				 NULL_RTX, mode_width - INTVAL (rhs),
6586				 code == LSHIFTRT, 0, in_code == COMPARE);
6587	  break;
6588	}
6589
6590      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6591	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6592	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6593	 seem worth the effort; the case checked for occurs on Alpha.  */
6594
6595      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6596	  && ! (GET_CODE (lhs) == SUBREG
6597		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6598	  && GET_CODE (rhs) == CONST_INT
6599	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6600	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6601	new = make_extraction (mode, make_compound_operation (new, next_code),
6602			       0, NULL_RTX, mode_width - INTVAL (rhs),
6603			       code == LSHIFTRT, 0, in_code == COMPARE);
6604
6605      break;
6606
6607    case SUBREG:
6608      /* Call ourselves recursively on the inner expression.  If we are
6609	 narrowing the object and it ends up with a different RTL code
6610	 from the one it originally had, do this SUBREG as a force_to_mode.  */
6611
6612      tem = make_compound_operation (SUBREG_REG (x), in_code);
6613      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6614	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6615	  && subreg_lowpart_p (x))
6616	{
6617	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6618				     NULL_RTX, 0);
6619
6620	  /* If we have something other than a SUBREG, we might have
6621	     done an expansion, so rerun ourselves.  */
6622	  if (GET_CODE (newer) != SUBREG)
6623	    newer = make_compound_operation (newer, in_code);
6624
6625	  return newer;
6626	}
6627
6628      /* If this is a paradoxical subreg, and the new code is a sign or
6629	 zero extension, omit the subreg and widen the extension.  If it
6630	 is a regular subreg, we can still get rid of the subreg by not
6631	 widening so much, or in fact removing the extension entirely.  */
6632      if ((GET_CODE (tem) == SIGN_EXTEND
6633	   || GET_CODE (tem) == ZERO_EXTEND)
6634	  && subreg_lowpart_p (x))
6635	{
6636	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6637	      || (GET_MODE_SIZE (mode) >
6638		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6639	    {
6640	      if (! SCALAR_INT_MODE_P (mode))
6641		break;
6642	      tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6643	    }
6644	  else
6645	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6646	  return tem;
6647	}
6648      break;
6649
6650    default:
6651      break;
6652    }
6653
6654  if (new)
6655    {
6656      x = gen_lowpart_for_combine (mode, new);
6657      code = GET_CODE (x);
6658    }
6659
6660  /* Now recursively process each operand of this operation.  */
6661  fmt = GET_RTX_FORMAT (code);
6662  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6663    if (fmt[i] == 'e')
6664      {
6665	new = make_compound_operation (XEXP (x, i), next_code);
6666	SUBST (XEXP (x, i), new);
6667      }
6668
6669  return x;
6670}
6671
6672/* Given M see if it is a value that would select a field of bits
6673   within an item, but not the entire word.  Return -1 if not.
6674   Otherwise, return the starting position of the field, where 0 is the
6675   low-order bit.
6676
6677   *PLEN is set to the length of the field.  */
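
/* Editorial example: M = 0x0ff0 has its first 1 bit at position 4
   (m & -m == 0x10), and (M >> 4) + 1 == 0x100 is a power of two, so
   we return 4 with *PLEN set to 8.  M = 0x0ff1 returns -1, since its
   1 bits are not contiguous.  */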
6678
6679static int
6680get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
6681{
6682  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6683  int pos = exact_log2 (m & -m);
6684  int len;
6685
6686  if (pos < 0)
6687    return -1;
6688
6689  /* Now shift off the low-order zero bits and see if we have a power of
6690     two minus 1.  */
6691  len = exact_log2 ((m >> pos) + 1);
6692
6693  if (len <= 0)
6694    return -1;
6695
6696  *plen = len;
6697  return pos;
6698}
6699
6700/* See if X can be simplified knowing that we will only refer to it in
6701   MODE and will only refer to those bits that are nonzero in MASK.
6702   If other bits are being computed or if masking operations are done
6703   that select a superset of the bits in MASK, they can sometimes be
6704   ignored.
6705
6706   Return a possibly simplified expression, but always convert X to
6707   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6708
6709   Also, if REG is nonzero and X is a register equal in value to REG,
6710   replace X with REG.
6711
6712   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6713   are all off in X.  This is used when X will be complemented, by either
6714   NOT, NEG, or XOR.  */
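
/* Editorial example with a hypothetical register: forcing
   (and:SI (reg:SI 60) (const_int 255)) to SImode under MASK = 15
   first narrows the AND constant to 15; since that equals MASK, the
   now-redundant AND is deleted (see the AND case below), leaving
   just (reg:SI 60).  */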
6715
6716static rtx
6717force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
6718	       rtx reg, int just_select)
6719{
6720  enum rtx_code code = GET_CODE (x);
6721  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6722  enum machine_mode op_mode;
6723  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6724  rtx op0, op1, temp;
6725
6726  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6727     code below will do the wrong thing since the mode of such an
6728     expression is VOIDmode.
6729
6730     Also do nothing if X is a CLOBBER; this can happen if X was
6731     the return value from a call to gen_lowpart_for_combine.  */
6732  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6733    return x;
6734
6735  /* We want to perform the operation in its present mode unless we know
6736     that the operation is valid in MODE, in which case we do the operation
6737     in MODE.  */
6738  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6739	      && have_insn_for (code, mode))
6740	     ? mode : GET_MODE (x));
6741
6742  /* It is not valid to do a right-shift in a narrower mode
6743     than the one it came in with.  */
6744  if ((code == LSHIFTRT || code == ASHIFTRT)
6745      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6746    op_mode = GET_MODE (x);
6747
6748  /* Truncate MASK to fit OP_MODE.  */
6749  if (op_mode)
6750    mask &= GET_MODE_MASK (op_mode);
6751
6752  /* When we have an arithmetic operation, or a shift whose count we
6753     do not know, we need to assume that all bits up to the highest-order
6754     bit in MASK will be needed.  This is how we form such a mask.  */
6755  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
6756    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
6757  else
6758    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6759		   - 1);
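  /* For instance (editorial note), MASK = 0x14 has its highest bit at
     position 4, so FULLER_MASK becomes (1 << 5) - 1 = 0x1f.  */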
6760
6761  /* Determine what bits of X are guaranteed to be (non)zero.  */
6762  nonzero = nonzero_bits (x, mode);
6763
6764  /* If none of the bits in X are needed, return a zero.  */
6765  if (! just_select && (nonzero & mask) == 0)
6766    x = const0_rtx;
6767
6768  /* If X is a CONST_INT, return a new one.  Do this here since the
6769     test below will fail.  */
6770  if (GET_CODE (x) == CONST_INT)
6771    {
6772      if (SCALAR_INT_MODE_P (mode))
6773        return gen_int_mode (INTVAL (x) & mask, mode);
6774      else
6775	{
6776	  x = GEN_INT (INTVAL (x) & mask);
6777	  return gen_lowpart_common (mode, x);
6778	}
6779    }
6780
6781  /* If X is narrower than MODE and we want all the bits in X's mode, just
6782     get X in the proper mode.  */
6783  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6784      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6785    return gen_lowpart_for_combine (mode, x);
6786
6787  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6788     MASK are already known to be zero in X, we need not do anything.  */
6789  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6790    return x;
6791
6792  switch (code)
6793    {
6794    case CLOBBER:
6795      /* If X is a (clobber (const_int)), return it since we know we are
6796	 generating something that won't match.  */
6797      return x;
6798
6799    case USE:
6800      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6801	 spanned the boundary of the MEM.  If we are now masking so it is
6802	 within that boundary, we don't need the USE any more.  */
6803      if (! BITS_BIG_ENDIAN
6804	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6805	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6806      break;
6807
6808    case SIGN_EXTEND:
6809    case ZERO_EXTEND:
6810    case ZERO_EXTRACT:
6811    case SIGN_EXTRACT:
6812      x = expand_compound_operation (x);
6813      if (GET_CODE (x) != code)
6814	return force_to_mode (x, mode, mask, reg, next_select);
6815      break;
6816
6817    case REG:
6818      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6819		       || rtx_equal_p (reg, get_last_value (x))))
6820	x = reg;
6821      break;
6822
6823    case SUBREG:
6824      if (subreg_lowpart_p (x)
6825	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6826	     if the constant masks to zero all the bits the mode doesn't
6827	     have.  */
6828	  && ((GET_MODE_SIZE (GET_MODE (x))
6829	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6830	      || (0 == (mask
6831			& GET_MODE_MASK (GET_MODE (x))
6832			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6833	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6834      break;
6835
6836    case AND:
6837      /* If this is an AND with a constant, convert it into an AND
6838	 whose constant is the AND of that constant with MASK.  If it
6839	 remains an AND of MASK, delete it since it is redundant.  */
6840
6841      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6842	{
6843	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6844				      mask & INTVAL (XEXP (x, 1)));
6845
6846	  /* If X is still an AND, see if it is an AND with a mask that
6847	     is just some low-order bits.  If so, and it is MASK, we don't
6848	     need it.  */
6849
6850	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6851	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6852		  == mask))
6853	    x = XEXP (x, 0);
6854
6855	  /* If it remains an AND, try making another AND with the bits
6856	     in the mode mask that aren't in MASK turned on.  If the
6857	     constant in the AND is wide enough, this might make a
6858	     cheaper constant.  */
6859
6860	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6861	      && GET_MODE_MASK (GET_MODE (x)) != mask
6862	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6863	    {
6864	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6865				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6866	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6867	      rtx y;
6868
6869	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6870		 number, sign extend it.  */
6871	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6872		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6873		cval |= (HOST_WIDE_INT) -1 << width;
6874
6875	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6876	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6877		x = y;
6878	    }
6879
6880	  break;
6881	}
6882
6883      goto binop;
6884
6885    case PLUS:
6886      /* In (and (plus FOO C1) M), if M is a mask that just turns off
6887	 low-order bits (as in an alignment operation) and FOO is already
6888	 aligned to that boundary, mask C1 to that boundary as well.
6889	 This may eliminate that PLUS and, later, the AND.  */
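      /* Editorial example: with M = -4 (i.e. ~3), C1 = 7, and FOO known
	 to have no nonzero bits below bit 2, (plus FOO 7) can become
	 (plus FOO 4), because the low two bits are masked off anyway.  */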
6890
6891      {
6892	unsigned int width = GET_MODE_BITSIZE (mode);
6893	unsigned HOST_WIDE_INT smask = mask;
6894
6895	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6896	   number, sign extend it.  */
6897
6898	if (width < HOST_BITS_PER_WIDE_INT
6899	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6900	  smask |= (HOST_WIDE_INT) -1 << width;
6901
6902	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6903	    && exact_log2 (- smask) >= 0
6904	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6905	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6906	  return force_to_mode (plus_constant (XEXP (x, 0),
6907					       (INTVAL (XEXP (x, 1)) & smask)),
6908				mode, smask, reg, next_select);
6909      }
6910
6911      /* ... fall through ...  */
6912
6913    case MULT:
6914      /* For PLUS, MINUS and MULT, we need any bits less significant than the
6915	 most significant bit in MASK since carries from those bits will
6916	 affect the bits we are interested in.  */
6917      mask = fuller_mask;
6918      goto binop;
6919
6920    case MINUS:
6921      /* If X is (minus C Y) where C's least significant set bit is larger
6922	 than any bit in the mask, then we may replace X with (neg Y).  */
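      /* Editorial example: with MASK = 15, (minus (const_int 16) Y)
	 may become (neg Y), since 16 contributes nothing to the low
	 four bits: (16 - Y) and (0 - Y) agree modulo 16.  */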
6923      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6924	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6925					& -INTVAL (XEXP (x, 0))))
6926	      > mask))
6927	{
6928	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6929				  GET_MODE (x));
6930	  return force_to_mode (x, mode, mask, reg, next_select);
6931	}
6932
6933      /* Similarly, if C contains every bit in the fuller_mask, then we may
6934	 replace with (not Y).  */
6935      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6936	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
6937	      == INTVAL (XEXP (x, 0))))
6938	{
6939	  x = simplify_gen_unary (NOT, GET_MODE (x),
6940				  XEXP (x, 1), GET_MODE (x));
6941	  return force_to_mode (x, mode, mask, reg, next_select);
6942	}
6943
6944      mask = fuller_mask;
6945      goto binop;
6946
6947    case IOR:
6948    case XOR:
6949      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6950	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6951	 operation which may be a bitfield extraction.  Ensure that the
6952	 constant we form is not wider than the mode of X.  */
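      /* Editorial example: with MASK = 255, (ior (lshiftrt FOO 8) 3)
	 can become (lshiftrt (ior FOO 0x300) 8), provided the set bits
	 of the constant 3 all lie within the nonzero bits of
	 (lshiftrt FOO 8) and the shifted constant still fits in the
	 mode of X.  */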
6953
6954      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6955	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6956	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6957	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6958	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6959	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
6960	       + floor_log2 (INTVAL (XEXP (x, 1))))
6961	      < GET_MODE_BITSIZE (GET_MODE (x)))
6962	  && (INTVAL (XEXP (x, 1))
6963	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6964	{
6965	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6966			  << INTVAL (XEXP (XEXP (x, 0), 1)));
6967	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
6968			     XEXP (XEXP (x, 0), 0), temp);
6969	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6970			  XEXP (XEXP (x, 0), 1));
6971	  return force_to_mode (x, mode, mask, reg, next_select);
6972	}
6973
6974    binop:
6975      /* For most binary operations, just propagate into the operation and
6976	 change the mode if we have an operation of that mode.  */
6977
6978      op0 = gen_lowpart_for_combine (op_mode,
6979				     force_to_mode (XEXP (x, 0), mode, mask,
6980						    reg, next_select));
6981      op1 = gen_lowpart_for_combine (op_mode,
6982				     force_to_mode (XEXP (x, 1), mode, mask,
6983						    reg, next_select));
6984
6985      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6986	x = gen_binary (code, op_mode, op0, op1);
6987      break;
6988
6989    case ASHIFT:
6990      /* For left shifts, do the same, but just for the first operand.
6991	 However, we cannot do anything with shifts where we cannot
6992	 guarantee that the counts are smaller than the size of the mode
6993	 because such a count will have a different meaning in a
6994	 wider mode.  */
6995
6996      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6997	     && INTVAL (XEXP (x, 1)) >= 0
6998	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6999	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7000		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7001		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7002	break;
7003
7004      /* If the shift count is a constant and we can do arithmetic in
7005	 the mode of the shift, refine which bits we need.  Otherwise, use the
7006	 conservative form of the mask.  */
7007      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7008	  && INTVAL (XEXP (x, 1)) >= 0
7009	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7010	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7011	mask >>= INTVAL (XEXP (x, 1));
7012      else
7013	mask = fuller_mask;
7014
7015      op0 = gen_lowpart_for_combine (op_mode,
7016				     force_to_mode (XEXP (x, 0), op_mode,
7017						    mask, reg, next_select));
7018
7019      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7020	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7021      break;
7022
7023    case LSHIFTRT:
7024      /* Here we can only do something if the shift count is a constant,
7025	 this shift constant is valid for the host, and we can do arithmetic
7026	 in OP_MODE.  */
7027
7028      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7029	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7030	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7031	{
7032	  rtx inner = XEXP (x, 0);
7033	  unsigned HOST_WIDE_INT inner_mask;
7034
7035	  /* Select the mask of the bits we need for the shift operand.  */
7036	  inner_mask = mask << INTVAL (XEXP (x, 1));
7037
7038	  /* We can only change the mode of the shift if we can do arithmetic
7039	     in the mode of the shift and INNER_MASK is no wider than the
7040	     width of OP_MODE.  */
7041	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7042	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7043	    op_mode = GET_MODE (x);
7044
7045	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7046
7047	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7048	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7049	}
7050
7051      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7052	 shift and AND produces only copies of the sign bit (C2 is one less
7053	 than a power of two), we can do this with just a shift.  */
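      /* Editorial example: with MASK = 255 and X = (lshiftrt FOO 2),
	 where FOO is known to have 30 sign-bit copies in SImode, every
	 bit the mask selects is a copy of the sign bit, so X may become
	 (lshiftrt FOO 24), which moves those copies into the low byte.  */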
7054
7055      if (GET_CODE (x) == LSHIFTRT
7056	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7057	  /* The shift puts one of the sign bit copies in the least significant
7058	     bit.  */
7059	  && ((INTVAL (XEXP (x, 1))
7060	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7061	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7062	  && exact_log2 (mask + 1) >= 0
7063	  /* Number of bits left after the shift must be more than the mask
7064	     needs.  */
7065	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7066	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7067	  /* Must be more sign bit copies than the mask needs.  */
7068	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7069	      >= exact_log2 (mask + 1)))
7070	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7071			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7072				 - exact_log2 (mask + 1)));
7073
7074      goto shiftrt;
7075
7076    case ASHIFTRT:
7077      /* If we are just looking for the sign bit, we don't need this shift at
7078	 all, even if it has a variable count.  */
7079      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7080	  && (mask == ((unsigned HOST_WIDE_INT) 1
7081		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7082	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7083
7084      /* If this is a shift by a constant, get a mask that contains those bits
7085	 that are not copies of the sign bit.  We then have two cases:  If
7086	 MASK only includes those bits, this can be a logical shift, which may
7087	 allow simplifications.  If MASK is a single-bit field not within
7088	 those bits, we are requesting a copy of the sign bit and hence can
7089	 shift the sign bit to the appropriate location.  */
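      /* Editorial example: for (ashiftrt FOO 8) in SImode, the bits that
	 are not sign-bit copies form 0x00ffffff.  If MASK fits within
	 that, an equivalent logical shift is tried instead; if MASK is
	 the single bit 0x8000, the sign bit is shifted down to position
	 15 via (lshiftrt FOO (const_int 16)).  */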
7090
7091      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7092	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7093	{
7094	  int i = -1;
7095
7096	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7097	     represent a mask for all its bits in a single scalar.
7098	     But we only care about the lower bits, so calculate these.  */
7099
7100	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7101	    {
7102	      nonzero = ~(HOST_WIDE_INT) 0;
7103
7104	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7105		 is the number of bits a full-width mask would have set.
7106		 We need only shift if these are fewer than nonzero can
7107		 hold.  If not, we must keep all bits set in nonzero.  */
7108
7109	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7110		  < HOST_BITS_PER_WIDE_INT)
7111		nonzero >>= INTVAL (XEXP (x, 1))
7112			    + HOST_BITS_PER_WIDE_INT
7113			    - GET_MODE_BITSIZE (GET_MODE (x));
7114	    }
7115	  else
7116	    {
7117	      nonzero = GET_MODE_MASK (GET_MODE (x));
7118	      nonzero >>= INTVAL (XEXP (x, 1));
7119	    }
7120
7121	  if ((mask & ~nonzero) == 0
7122	      || (i = exact_log2 (mask)) >= 0)
7123	    {
7124	      x = simplify_shift_const
7125		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7126		 i < 0 ? INTVAL (XEXP (x, 1))
7127		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7128
7129	      if (GET_CODE (x) != ASHIFTRT)
7130		return force_to_mode (x, mode, mask, reg, next_select);
7131	    }
7132	}
7133
7134      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7135	 even if the shift count isn't a constant.  */
7136      if (mask == 1)
7137	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7138
7139    shiftrt:
7140
7141      /* If this is a zero- or sign-extension operation that just affects bits
7142	 we don't care about, remove it.  Be sure the call above returned
7143	 something that is still a shift.  */
7144
7145      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7146	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7147	  && INTVAL (XEXP (x, 1)) >= 0
7148	  && (INTVAL (XEXP (x, 1))
7149	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7150	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7151	  && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7152	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7153			      reg, next_select);
7154
7155      break;
7156
7157    case ROTATE:
7158    case ROTATERT:
7159      /* If the shift count is constant and we can do computations
7160	 in the mode of X, compute where the bits we care about are.
7161	 Otherwise, we can't do anything.  Don't change the mode of
7162	 the shift or propagate MODE into the shift, though.  */
7163      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7164	  && INTVAL (XEXP (x, 1)) >= 0)
7165	{
7166	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7167					    GET_MODE (x), GEN_INT (mask),
7168					    XEXP (x, 1));
7169	  if (temp && GET_CODE (temp) == CONST_INT)
7170	    SUBST (XEXP (x, 0),
7171		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7172				  INTVAL (temp), reg, next_select));
7173	}
7174      break;
7175
7176    case NEG:
7177      /* If we just want the low-order bit, the NEG isn't needed since it
7178	 won't change the low-order bit.  */
7179      if (mask == 1)
7180	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7181
7182      /* We need any bits less significant than the most significant bit in
7183	 MASK since carries from those bits will affect the bits we are
7184	 interested in.  */
7185      mask = fuller_mask;
7186      goto unop;
7187
7188    case NOT:
7189      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7190	 same as the XOR case above.  Ensure that the constant we form is not
7191	 wider than the mode of X.  */
7192
7193      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7194	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7195	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7196	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7197	      < GET_MODE_BITSIZE (GET_MODE (x)))
7198	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7199	{
7200	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7201			       GET_MODE (x));
7202	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7203	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7204
7205	  return force_to_mode (x, mode, mask, reg, next_select);
7206	}
7207
7208      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7209	 use the full mask inside the NOT.  */
7210      mask = fuller_mask;
7211
7212    unop:
7213      op0 = gen_lowpart_for_combine (op_mode,
7214				     force_to_mode (XEXP (x, 0), mode, mask,
7215						    reg, next_select));
7216      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7217	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7218      break;
7219
7220    case NE:
7221      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7222	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7223	 and the value of that bit equals STORE_FLAG_VALUE.  */
7224      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7225	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7226	  && (nonzero_bits (XEXP (x, 0), mode)
7227	      == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
7228	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7229
7230      break;
7231
7232    case IF_THEN_ELSE:
7233      /* We have no way of knowing if the IF_THEN_ELSE can itself be
7234	 written in a narrower mode.  We play it safe and do not do so.  */
7235
7236      SUBST (XEXP (x, 1),
7237	     gen_lowpart_for_combine (GET_MODE (x),
7238				      force_to_mode (XEXP (x, 1), mode,
7239						     mask, reg, next_select)));
7240      SUBST (XEXP (x, 2),
7241	     gen_lowpart_for_combine (GET_MODE (x),
7242				      force_to_mode (XEXP (x, 2), mode,
7243						     mask, reg, next_select)));
7244      break;
7245
7246    default:
7247      break;
7248    }
7249
7250  /* Ensure we return a value of the proper mode.  */
7251  return gen_lowpart_for_combine (mode, x);
7252}
7253
7254/* Return nonzero if X is an expression that has one of two values depending on
7255   whether some other value is zero or nonzero.  In that case, we return the
7256   value that is being tested, *PTRUE is set to the value if the rtx being
7257   returned has a nonzero value, and *PFALSE is set to the other alternative.
7258
7259   If we return zero, we set *PTRUE and *PFALSE to X.  */
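
/* Editorial example with hypothetical operands: for
   X = (ne:SI (reg:SI 60) (const_int 0)) we return (reg:SI 60) with
   *PTRUE = const_true_rtx and *PFALSE = const0_rtx; for
   X = (if_then_else (eq C (const_int 0)) A B) we return C with
   *PTRUE = B and *PFALSE = A.  */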
7260
7261static rtx
7262if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
7263{
7264  enum machine_mode mode = GET_MODE (x);
7265  enum rtx_code code = GET_CODE (x);
7266  rtx cond0, cond1, true0, true1, false0, false1;
7267  unsigned HOST_WIDE_INT nz;
7268
7269  /* If we are comparing a value against zero, we are done.  */
7270  if ((code == NE || code == EQ)
7271      && XEXP (x, 1) == const0_rtx)
7272    {
7273      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7274      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7275      return XEXP (x, 0);
7276    }
7277
7278  /* If this is a unary operation whose operand has one of two values, apply
7279     our opcode to compute those values.  */
7280  else if (GET_RTX_CLASS (code) == '1'
7281	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7282    {
7283      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7284      *pfalse = simplify_gen_unary (code, mode, false0,
7285				    GET_MODE (XEXP (x, 0)));
7286      return cond0;
7287    }
7288
7289  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7290     make can't possibly match and would suppress other optimizations.  */
7291  else if (code == COMPARE)
7292    ;
7293
7294  /* If this is a binary operation, see if either side has only one of two
7295     values.  If either one does or if both do and they are conditional on
7296     the same value, compute the new true and false values.  */
7297  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7298	   || GET_RTX_CLASS (code) == '<')
7299    {
7300      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7301      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7302
7303      if ((cond0 != 0 || cond1 != 0)
7304	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7305	{
7306	  /* If if_then_else_cond returned zero, then true/false are the
7307	     same rtl.  We must copy one of them to prevent invalid rtl
7308	     sharing.  */
7309	  if (cond0 == 0)
7310	    true0 = copy_rtx (true0);
7311	  else if (cond1 == 0)
7312	    true1 = copy_rtx (true1);
7313
7314	  *ptrue = gen_binary (code, mode, true0, true1);
7315	  *pfalse = gen_binary (code, mode, false0, false1);
7316	  return cond0 ? cond0 : cond1;
7317	}
7318
7319      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7320	 operands is zero when the other is nonzero, and vice-versa,
7321	 and STORE_FLAG_VALUE is 1 or -1.  */
7322
7323      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7324	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7325	      || code == UMAX)
7326	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7327	{
7328	  rtx op0 = XEXP (XEXP (x, 0), 1);
7329	  rtx op1 = XEXP (XEXP (x, 1), 1);
7330
7331	  cond0 = XEXP (XEXP (x, 0), 0);
7332	  cond1 = XEXP (XEXP (x, 1), 0);
7333
7334	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7335	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7336	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7337		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7338		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7339		  || ((swap_condition (GET_CODE (cond0))
7340		       == combine_reversed_comparison_code (cond1))
7341		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7342		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7343	      && ! side_effects_p (x))
7344	    {
7345	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7346	      *pfalse = gen_binary (MULT, mode,
7347				    (code == MINUS
7348				     ? simplify_gen_unary (NEG, mode, op1,
7349							   mode)
7350				     : op1),
7351				    const_true_rtx);
7352	      return cond0;
7353	    }
7354	}
7355
7356      /* Similarly for MULT, AND and UMIN, except that for these the result
7357	 is always zero.  */
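      /* E.g. (and (mult (lt A B) C) (mult (ge A B) D)) is zero whichever
	 way the comparison goes, since one MULT factor is always zero.  */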
7358      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7359	  && (code == MULT || code == AND || code == UMIN)
7360	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7361	{
7362	  cond0 = XEXP (XEXP (x, 0), 0);
7363	  cond1 = XEXP (XEXP (x, 1), 0);
7364
7365	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7366	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7367	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7368		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7369		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7370		  || ((swap_condition (GET_CODE (cond0))
7371		       == combine_reversed_comparison_code (cond1))
7372		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7373		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7374	      && ! side_effects_p (x))
7375	    {
7376	      *ptrue = *pfalse = const0_rtx;
7377	      return cond0;
7378	    }
7379	}
7380    }
7381
7382  else if (code == IF_THEN_ELSE)
7383    {
7384      /* If we have IF_THEN_ELSE already, extract the condition and
7385	 canonicalize it if it is NE or EQ.  */
7386      cond0 = XEXP (x, 0);
7387      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7388      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7389	return XEXP (cond0, 0);
7390      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7391	{
7392	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7393	  return XEXP (cond0, 0);
7394	}
7395      else
7396	return cond0;
7397    }
7398
7399  /* If X is a SUBREG, we can narrow both the true and false values
7400     of the inner expression, if there is a condition.  */
7401  else if (code == SUBREG
7402	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7403					       &true0, &false0)))
7404    {
7405      true0 = simplify_gen_subreg (mode, true0,
7406				   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7407      false0 = simplify_gen_subreg (mode, false0,
7408				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7409      if (true0 && false0)
7410	{
7411	  *ptrue = true0;
7412	  *pfalse = false0;
7413	  return cond0;
7414	}
7415    }
7416
7417  /* If X is a constant, this isn't special and will cause confusion
7418     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7419  else if (CONSTANT_P (x)
7420	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7421    ;
7422
7423  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7424     will be least confusing to the rest of the compiler.  */
7425  else if (mode == BImode)
7426    {
7427      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7428      return x;
7429    }
7430
7431  /* If X is known to be either 0 or -1, those are the true and
7432     false values when testing X.  */
7433  else if (x == constm1_rtx || x == const0_rtx
7434	   || (mode != VOIDmode
7435	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7436    {
7437      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7438      return x;
7439    }
7440
7441  /* Likewise for 0 or a single bit.  */
7442  else if (SCALAR_INT_MODE_P (mode)
7443	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7444	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7445    {
7446      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7447      return x;
7448    }
7449
7450  /* Otherwise fail; show no condition with true and false values the same.  */
7451  *ptrue = *pfalse = x;
7452  return 0;
7453}
7454
7455/* Return the value of expression X given the fact that condition COND
7456   is known to be true when applied to REG as its first operand and VAL
7457   as its second.  X is known to not be shared and so can be modified in
7458   place.
7459
7460   We only handle the simplest cases, and specifically those cases that
7461   arise with IF_THEN_ELSE expressions.  */
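
/* For example, if COND is LT with operands REG and VAL, then
   known_cond ((smax REG VAL), LT, REG, VAL) returns VAL, since the
   maximum of REG and VAL is VAL whenever REG < VAL.  */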
7462
7463static rtx
7464known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7465{
7466  enum rtx_code code = GET_CODE (x);
7467  rtx temp;
7468  const char *fmt;
7469  int i, j;
7470
7471  if (side_effects_p (x))
7472    return x;
7473
7474  /* If either operand of the condition is a floating point value,
7475     then we have to avoid collapsing an EQ comparison.  */
7476  if (cond == EQ
7477      && rtx_equal_p (x, reg)
7478      && ! FLOAT_MODE_P (GET_MODE (x))
7479      && ! FLOAT_MODE_P (GET_MODE (val)))
7480    return val;
7481
7482  if (cond == UNEQ && rtx_equal_p (x, reg))
7483    return val;
7484
7485  /* If X is (abs REG) and we know something about REG's relationship
7486     with zero, we may be able to simplify this.  */
7487
7488  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7489    switch (cond)
7490      {
7491      case GE:  case GT:  case EQ:
7492	return XEXP (x, 0);
7493      case LT:  case LE:
7494	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7495				   XEXP (x, 0),
7496				   GET_MODE (XEXP (x, 0)));
7497      default:
7498	break;
7499      }
7500
7501  /* The only other cases we handle are MIN, MAX, and comparisons if the
7502     operands are the same as REG and VAL.  */
7503
7504  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7505    {
7506      if (rtx_equal_p (XEXP (x, 0), val))
7507	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7508
7509      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7510	{
7511	  if (GET_RTX_CLASS (code) == '<')
7512	    {
7513	      if (comparison_dominates_p (cond, code))
7514		return const_true_rtx;
7515
7516	      code = combine_reversed_comparison_code (x);
7517	      if (code != UNKNOWN
7518		  && comparison_dominates_p (cond, code))
7519		return const0_rtx;
7520	      else
7521		return x;
7522	    }
7523	  else if (code == SMAX || code == SMIN
7524		   || code == UMIN || code == UMAX)
7525	    {
7526	      int unsignedp = (code == UMIN || code == UMAX);
7527
7528	      /* Do not reverse the condition when it is NE or EQ.
7529		 This is because we cannot conclude anything about
7530		 the value of 'SMAX (x, y)' when x is not equal to y,
7531		 but we can when x equals y.  */
7532	      if ((code == SMAX || code == UMAX)
7533		  && ! (cond == EQ || cond == NE))
7534		cond = reverse_condition (cond);
7535
7536	      switch (cond)
7537		{
7538		case GE:   case GT:
7539		  return unsignedp ? x : XEXP (x, 1);
7540		case LE:   case LT:
7541		  return unsignedp ? x : XEXP (x, 0);
7542		case GEU:  case GTU:
7543		  return unsignedp ? XEXP (x, 1) : x;
7544		case LEU:  case LTU:
7545		  return unsignedp ? XEXP (x, 0) : x;
7546		default:
7547		  break;
7548		}
7549	    }
7550	}
7551    }
7552  else if (code == SUBREG)
7553    {
7554      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7555      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7556
7557      if (SUBREG_REG (x) != r)
7558	{
7559	  /* We must simplify subreg here, before we lose track of the
7560	     original inner_mode.  */
7561	  new = simplify_subreg (GET_MODE (x), r,
7562				 inner_mode, SUBREG_BYTE (x));
7563	  if (new)
7564	    return new;
7565	  else
7566	    SUBST (SUBREG_REG (x), r);
7567	}
7568
7569      return x;
7570    }
7571  /* We don't have to handle SIGN_EXTEND here, because even in the
7572     case of replacing something with a modeless CONST_INT, a
7573     CONST_INT is already (supposed to be) a valid sign extension for
7574     its narrower mode, which implies it's already properly
7575     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7576     story is different.  */
7577  else if (code == ZERO_EXTEND)
7578    {
7579      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7580      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7581
7582      if (XEXP (x, 0) != r)
7583	{
7584	  /* We must simplify the zero_extend here, before we lose
7585             track of the original inner_mode.  */
7586	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7587					  r, inner_mode);
7588	  if (new)
7589	    return new;
7590	  else
7591	    SUBST (XEXP (x, 0), r);
7592	}
7593
7594      return x;
7595    }
7596
7597  fmt = GET_RTX_FORMAT (code);
7598  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7599    {
7600      if (fmt[i] == 'e')
7601	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7602      else if (fmt[i] == 'E')
7603	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7604	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7605						cond, reg, val));
7606    }
7607
7608  return x;
7609}
7610
7611/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7612   assignment as a field assignment.  */
7613
7614static int
7615rtx_equal_for_field_assignment_p (rtx x, rtx y)
7616{
7617  if (x == y || rtx_equal_p (x, y))
7618    return 1;
7619
7620  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7621    return 0;
7622
7623  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7624     Note that all SUBREGs of MEM are paradoxical; otherwise they
7625     would have been rewritten.  */
7626  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7627      && GET_CODE (SUBREG_REG (y)) == MEM
7628      && rtx_equal_p (SUBREG_REG (y),
7629		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7630    return 1;
7631
7632  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7633      && GET_CODE (SUBREG_REG (x)) == MEM
7634      && rtx_equal_p (SUBREG_REG (x),
7635		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7636    return 1;
7637
7638  /* We used to see if get_last_value of X and Y were the same but that's
7639     not correct.  In one direction, we'll cause the assignment to have
7640     the wrong destination, and in the other, we'll import a register into
7641     this insn that might already have been dead.  So fail if none of the
7642     above cases are true.  */
7643  return 0;
7644}
7645
7646/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7647   Return that assignment if so.
7648
7649   We only handle the most common cases.  */
7650
7651static rtx
7652make_field_assignment (rtx x)
7653{
7654  rtx dest = SET_DEST (x);
7655  rtx src = SET_SRC (x);
7656  rtx assign;
7657  rtx rhs, lhs;
7658  HOST_WIDE_INT c1;
7659  HOST_WIDE_INT pos;
7660  unsigned HOST_WIDE_INT len;
7661  rtx other;
7662  enum machine_mode mode;
7663
7664  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7665     a clear of a one-bit field.  We will have changed it to
7666     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7667     for a SUBREG.  */
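  /* E.g. in QImode with POS == 3, (not (ashift (const_int 1) (const_int 3)))
     is 11110111, which is (rotate (const_int -2) (const_int 3)): rotating
     11111110 left by POS moves the single zero bit into position POS.  */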
7668
7669  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7670      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7671      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7672      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7673    {
7674      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7675				1, 1, 1, 0);
7676      if (assign != 0)
7677	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7678      return x;
7679    }
7680
7681  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7682	   && subreg_lowpart_p (XEXP (src, 0))
7683	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7684	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7685	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7686	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7687	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7688	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7689    {
7690      assign = make_extraction (VOIDmode, dest, 0,
7691				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7692				1, 1, 1, 0);
7693      if (assign != 0)
7694	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7695      return x;
7696    }
7697
7698  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7699     one-bit field.  */
7700  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7701	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7702	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7703    {
7704      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7705				1, 1, 1, 0);
7706      if (assign != 0)
7707	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7708      return x;
7709    }
7710
7711  /* The other case we handle is assignments into a constant-position
7712     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7713     a mask that has all one bits except for a group of zero bits and
7714     OTHER is known to have zeros where C1 has ones, this is such an
7715     assignment.  Compute the position and length from C1.  Shift OTHER
7716     to the appropriate position, force it to the required mode, and
7717     make the extraction.  Check for the AND in both operands.  */
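  /* For example, in SImode with C1 == 0xffff00ff, the zero bits of C1 give
     POS == 8 and LEN == 8; if OTHER is known to be zero outside bits 8..15,
     this typically yields
     (set (zero_extract DEST (const_int 8) (const_int 8))
	  (lshiftrt OTHER (const_int 8))).  */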
7718
7719  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7720    return x;
7721
7722  rhs = expand_compound_operation (XEXP (src, 0));
7723  lhs = expand_compound_operation (XEXP (src, 1));
7724
7725  if (GET_CODE (rhs) == AND
7726      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7727      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7728    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7729  else if (GET_CODE (lhs) == AND
7730	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7731	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7732    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7733  else
7734    return x;
7735
7736  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7737  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7738      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7739      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7740    return x;
7741
7742  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7743  if (assign == 0)
7744    return x;
7745
7746  /* The mode to use for the source is the mode of the assignment, or of
7747     what is inside a possible STRICT_LOW_PART.  */
7748  mode = (GET_CODE (assign) == STRICT_LOW_PART
7749	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7750
7751  /* Shift OTHER right POS places and make it the source, restricting it
7752     to the proper length and mode.  */
7753
7754  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7755					     GET_MODE (src), other, pos),
7756		       mode,
7757		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7758		       ? ~(unsigned HOST_WIDE_INT) 0
7759		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7760		       dest, 0);
7761
7762  /* If SRC is masked by an AND that does not make a difference in
7763     the value being stored, strip it.  */
7764  if (GET_CODE (assign) == ZERO_EXTRACT
7765      && GET_CODE (XEXP (assign, 1)) == CONST_INT
7766      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
7767      && GET_CODE (src) == AND
7768      && GET_CODE (XEXP (src, 1)) == CONST_INT
7769      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
7770	  == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
7771    src = XEXP (src, 0);
7772
7773  return gen_rtx_SET (VOIDmode, assign, src);
7774}
7775
7776/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7777   if so.  */
7778
7779static rtx
7780apply_distributive_law (rtx x)
7781{
7782  enum rtx_code code = GET_CODE (x);
7783  enum rtx_code inner_code;
7784  rtx lhs, rhs, other;
7785  rtx tem;
7786
7787  /* Distributivity is not true for floating point as it can change the
7788     value.  So we don't do it unless -funsafe-math-optimizations.  */
7789  if (FLOAT_MODE_P (GET_MODE (x))
7790      && ! flag_unsafe_math_optimizations)
7791    return x;
7792
7793  /* The outer operation can only be one of the following:  */
7794  if (code != IOR && code != AND && code != XOR
7795      && code != PLUS && code != MINUS)
7796    return x;
7797
7798  lhs = XEXP (x, 0);
7799  rhs = XEXP (x, 1);
7800
7801  /* If either operand is a primitive we can't do anything, so get out
7802     fast.  */
7803  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7804      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7805    return x;
7806
7807  lhs = expand_compound_operation (lhs);
7808  rhs = expand_compound_operation (rhs);
7809  inner_code = GET_CODE (lhs);
7810  if (inner_code != GET_CODE (rhs))
7811    return x;
7812
7813  /* See if the inner and outer operations distribute.  */
7814  switch (inner_code)
7815    {
7816    case LSHIFTRT:
7817    case ASHIFTRT:
7818    case AND:
7819    case IOR:
7820      /* These all distribute except over PLUS and MINUS.  */
7821      if (code == PLUS || code == MINUS)
7822	return x;
7823      break;
7824
7825    case MULT:
7826      if (code != PLUS && code != MINUS)
7827	return x;
7828      break;
7829
7830    case ASHIFT:
7831      /* This is also a multiply, so it distributes over everything.  */
7832      break;
7833
7834    case SUBREG:
7835      /* Non-paradoxical SUBREGs distribute over all operations, provided
7836	 the inner modes and byte offsets are the same, this is an extraction
7837	 of a low-order part, we don't convert an fp operation to int or
7838	 vice versa, and we would not be converting a single-word
7839	 operation into a multi-word operation.  The latter test is not
7840	 required, but it prevents generating unneeded multi-word operations.
7841	 Some of the previous tests are redundant given the latter test, but
7842	 are retained because they are required for correctness.
7843
7844	 We produce the result slightly differently in this case.  */
7845
7846      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7847	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7848	  || ! subreg_lowpart_p (lhs)
7849	  || (GET_MODE_CLASS (GET_MODE (lhs))
7850	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7851	  || (GET_MODE_SIZE (GET_MODE (lhs))
7852	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7853	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7854	return x;
7855
7856      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7857			SUBREG_REG (lhs), SUBREG_REG (rhs));
7858      return gen_lowpart_for_combine (GET_MODE (x), tem);
7859
7860    default:
7861      return x;
7862    }
7863
7864  /* Set LHS and RHS to the inner operands (A and B in the example
7865     above) and set OTHER to the common operand (C in the example).
7866     There is only one way to do this unless the inner operation is
7867     commutative.  */
7868  if (GET_RTX_CLASS (inner_code) == 'c'
7869      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7870    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7871  else if (GET_RTX_CLASS (inner_code) == 'c'
7872	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7873    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7874  else if (GET_RTX_CLASS (inner_code) == 'c'
7875	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7876    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7877  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7878    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7879  else
7880    return x;
7881
7882  /* Form the new inner operation, seeing if it simplifies first.  */
7883  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7884
7885  /* There is one exception to the general way of distributing:
7886     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
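  /* To see this, check each bit: where c is 1, both (a | c) and (b | c)
     are 1, so the XOR is 0, matching the AND with ~c; where c is 0, the
     XOR is simply a ^ b.  */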
7887  if (code == XOR && inner_code == IOR)
7888    {
7889      inner_code = AND;
7890      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7891    }
7892
7893  /* We may be able to continue distributing the result, so call
7894     ourselves recursively on the inner operation before forming the
7895     outer operation, which we return.  */
7896  return gen_binary (inner_code, GET_MODE (x),
7897		     apply_distributive_law (tem), other);
7898}
7899
7900/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7901   in MODE.
7902
7903   Return an equivalent form, if different from X.  Otherwise, return X.  If
7904   X is zero, we are to always construct the equivalent form.  */
7905
7906static rtx
7907simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
7908			unsigned HOST_WIDE_INT constop)
7909{
7910  unsigned HOST_WIDE_INT nonzero;
7911  int i;
7912
7913  /* Simplify VAROP knowing that we will be only looking at some of the
7914     bits in it.
7915
7916     Note by passing in CONSTOP, we guarantee that the bits not set in
7917     CONSTOP are not significant and will never be examined.  We must
7918     ensure that is the case by explicitly masking out those bits
7919     before returning.  */
7920  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7921
7922  /* If VAROP is a CLOBBER, we will fail so return it.  */
7923  if (GET_CODE (varop) == CLOBBER)
7924    return varop;
7925
7926  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7927     to VAROP and return the new constant.  */
7928  if (GET_CODE (varop) == CONST_INT)
7929    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7930
7931  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7932     a call to nonzero_bits, here we don't care about bits outside
7933     MODE.  */
7934
7935  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7936
7937  /* Turn off all bits in the constant that are known to already be zero.
7938     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7939     which is tested below.  */
7940
7941  constop &= nonzero;
7942
7943  /* If we don't have any bits left, return zero.  */
7944  if (constop == 0)
7945    return const0_rtx;
7946
7947  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7948     a power of two, we can replace this with an ASHIFT.  */
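  /* E.g. if X is known to be 0 or 1, (and (neg X) (const_int 8)) is
     either 0 or 8, which is (ashift X (const_int 3)).  */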
7949  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7950      && (i = exact_log2 (constop)) >= 0)
7951    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7952
7953  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7954     or XOR, then try to apply the distributive law.  This may eliminate
7955     operations if either branch can be simplified because of the AND.
7956     It may also make some cases more complex, but those cases probably
7957     won't match a pattern either with or without this.  */
7958
7959  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7960    return
7961      gen_lowpart_for_combine
7962	(mode,
7963	 apply_distributive_law
7964	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7965		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7966					      XEXP (varop, 0), constop),
7967		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7968					      XEXP (varop, 1), constop))));
7969
7970  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
7971     the AND and see if one of the operands simplifies to zero.  If so, we
7972     may eliminate it.  */
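  /* E.g. (and (plus X (const_int 4)) (const_int 3)): the addend 4 has no
     bits within the mask, so its AND simplifies to zero and the whole
     expression reduces to (and X (const_int 3)).  */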
7973
7974  if (GET_CODE (varop) == PLUS
7975      && exact_log2 (constop + 1) >= 0)
7976    {
7977      rtx o0, o1;
7978
7979      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7980      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7981      if (o0 == const0_rtx)
7982	return o1;
7983      if (o1 == const0_rtx)
7984	return o0;
7985    }
7986
7987  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7988     if we already had one (just check for the simplest cases).  */
7989  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7990      && GET_MODE (XEXP (x, 0)) == mode
7991      && SUBREG_REG (XEXP (x, 0)) == varop)
7992    varop = XEXP (x, 0);
7993  else
7994    varop = gen_lowpart_for_combine (mode, varop);
7995
7996  /* If we can't make the SUBREG, try to return what we were given.  */
7997  if (GET_CODE (varop) == CLOBBER)
7998    return x ? x : varop;
7999
8000  /* If we are only masking insignificant bits, return VAROP.  */
8001  if (constop == nonzero)
8002    x = varop;
8003  else
8004    {
8005      /* Otherwise, return an AND.  */
8006      constop = trunc_int_for_mode (constop, mode);
8007      /* See how much, if any, of X we can use.  */
8008      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8009	x = gen_binary (AND, mode, varop, GEN_INT (constop));
8010
8011      else
8012	{
8013	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
8014	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8015	    SUBST (XEXP (x, 1), GEN_INT (constop));
8016
8017	  SUBST (XEXP (x, 0), varop);
8018	}
8019    }
8020
8021  return x;
8022}
8023
8024#define nonzero_bits_with_known(X, MODE) \
8025  cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret)
8026
8027/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
8028   It avoids exponential behavior in nonzero_bits1 when X has
8029   identical subexpressions on the first or the second level.  */
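
/* For instance, given x == (mult y y), nonzero_bits for the shared
   subexpression y is computed once and passed down as KNOWN_RET rather
   than being recomputed in both operand positions.  */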
8030
8031static unsigned HOST_WIDE_INT
8032cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
8033		     enum machine_mode known_mode,
8034		     unsigned HOST_WIDE_INT known_ret)
8035{
8036  if (x == known_x && mode == known_mode)
8037    return known_ret;
8038
8039  /* Try to find identical subexpressions.  If found call
8040     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
8041     precomputed value for the subexpression as KNOWN_RET.  */
8042
8043  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8044      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8045    {
8046      rtx x0 = XEXP (x, 0);
8047      rtx x1 = XEXP (x, 1);
8048
8049      /* Check the first level.  */
8050      if (x0 == x1)
8051	return nonzero_bits1 (x, mode, x0, mode,
8052			      nonzero_bits_with_known (x0, mode));
8053
8054      /* Check the second level.  */
8055      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8056	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8057	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8058	return nonzero_bits1 (x, mode, x1, mode,
8059			      nonzero_bits_with_known (x1, mode));
8060
8061      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8062	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8063	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8064	return nonzero_bits1 (x, mode, x0, mode,
8065			      nonzero_bits_with_known (x0, mode));
8066    }
8067
8068  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
8069}
8070
8071/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
8072   We don't let nonzero_bits recur into num_sign_bit_copies, because that
8073   is less useful.  We can't allow both, because that results in exponential
8074   run time recursion.  There is a nullstone testcase that triggered
8075   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
8076#define cached_num_sign_bit_copies()
8077
8078/* Given an expression, X, compute which bits in X can be nonzero.
8079   We don't care about bits outside of those defined in MODE.
8080
8081   For most X this is simply GET_MODE_MASK (MODE), but if X is
8082   a shift, AND, or zero_extract, we can do better.  */
8083
8084static unsigned HOST_WIDE_INT
8085nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
8086	       enum machine_mode known_mode,
8087	       unsigned HOST_WIDE_INT known_ret)
8088{
8089  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8090  unsigned HOST_WIDE_INT inner_nz;
8091  enum rtx_code code;
8092  unsigned int mode_width = GET_MODE_BITSIZE (mode);
8093  rtx tem;
8094
8095  /* For floating-point values, assume all bits are needed.  */
8096  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8097    return nonzero;
8098
8099  /* If X is wider than MODE, use its mode instead.  */
8100  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8101    {
8102      mode = GET_MODE (x);
8103      nonzero = GET_MODE_MASK (mode);
8104      mode_width = GET_MODE_BITSIZE (mode);
8105    }
8106
8107  if (mode_width > HOST_BITS_PER_WIDE_INT)
8108    /* Our only callers in this case look for single bit values.  So
8109       just return the mode mask.  Those tests will then be false.  */
8110    return nonzero;
8111
8112#ifndef WORD_REGISTER_OPERATIONS
8113  /* If MODE is wider than X, but both are a single word for both the host
8114     and target machines, we can compute this from which bits of the
8115     object might be nonzero in its own mode, taking into account the fact
8116     that on many CISC machines, accessing an object in a wider mode
8117     causes the high-order bits to become undefined.  So they are
8118     not known to be zero.  */
8119
8120  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8121      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8122      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8123      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8124    {
8125      nonzero &= nonzero_bits_with_known (x, GET_MODE (x));
8126      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8127      return nonzero;
8128    }
8129#endif
8130
8131  code = GET_CODE (x);
8132  switch (code)
8133    {
8134    case REG:
8135#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8136      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8137	 all the bits above ptr_mode are known to be zero.  */
8138      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8139	  && REG_POINTER (x))
8140	nonzero &= GET_MODE_MASK (ptr_mode);
8141#endif
8142
8143      /* Include declared information about alignment of pointers.  */
8144      /* ??? We don't properly preserve REG_POINTER changes across
8145	 pointer-to-integer casts, so we can't trust it except for
8146	 things that we know must be pointers.  See execute/960116-1.c.  */
8147      if ((x == stack_pointer_rtx
8148	   || x == frame_pointer_rtx
8149	   || x == arg_pointer_rtx)
8150	  && REGNO_POINTER_ALIGN (REGNO (x)))
8151	{
8152	  unsigned HOST_WIDE_INT alignment
8153	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8154
8155#ifdef PUSH_ROUNDING
8156	  /* If PUSH_ROUNDING is defined, it is possible for the
8157	     stack to be momentarily aligned only to that amount,
8158	     so we pick the least alignment.  */
8159	  if (x == stack_pointer_rtx && PUSH_ARGS)
8160	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
8161			     alignment);
8162#endif
8163
8164	  nonzero &= ~(alignment - 1);
8165	}
8166
8167      /* If X is a register whose nonzero bits value is current, use it.
8168	 Otherwise, if X is a register whose value we can find, use that
8169	 value.  Otherwise, use the previously-computed global nonzero bits
8170	 for this register.  */
8171
8172      if (reg_last_set_value[REGNO (x)] != 0
8173	  && (reg_last_set_mode[REGNO (x)] == mode
8174	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8175		  && GET_MODE_CLASS (mode) == MODE_INT))
8176	  && (reg_last_set_label[REGNO (x)] == label_tick
8177	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8178		  && REG_N_SETS (REGNO (x)) == 1
8179		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8180					REGNO (x))))
8181	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8182	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8183
8184      tem = get_last_value (x);
8185
8186      if (tem)
8187	{
8188#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8189	  /* If X is narrower than MODE and TEM is a non-negative
8190	     constant that would appear negative in the mode of X,
8191	     sign-extend it for use in reg_nonzero_bits because some
8192	     machines (maybe most) will actually do the sign-extension
8193	     and this is the conservative approach.
8194
8195	     ??? For 2.5, try to tighten up the MD files in this regard
8196	     instead of this kludge.  */
8197
8198	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8199	      && GET_CODE (tem) == CONST_INT
8200	      && INTVAL (tem) > 0
8201	      && 0 != (INTVAL (tem)
8202		       & ((HOST_WIDE_INT) 1
8203			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8204	    tem = GEN_INT (INTVAL (tem)
8205			   | ((HOST_WIDE_INT) (-1)
8206			      << GET_MODE_BITSIZE (GET_MODE (x))));
8207#endif
8208	  return nonzero_bits_with_known (tem, mode) & nonzero;
8209	}
8210      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8211	{
8212	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8213
8214	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8215	    /* We don't know anything about the upper bits.  */
8216	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8217	  return nonzero & mask;
8218	}
8219      else
8220	return nonzero;
8221
8222    case CONST_INT:
8223#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8224      /* If X is negative in MODE, sign-extend the value.  */
8225      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8226	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8227	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8228#endif
8229
8230      return INTVAL (x);
8231
8232    case MEM:
8233#ifdef LOAD_EXTEND_OP
8234      /* In many, if not most, RISC machines, reading a byte from memory
8235	 zeros the rest of the register.  Noticing that fact saves a lot
8236	 of extra zero-extends.  */
8237      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8238	nonzero &= GET_MODE_MASK (GET_MODE (x));
8239#endif
8240      break;
8241
8242    case EQ:  case NE:
8243    case UNEQ:  case LTGT:
8244    case GT:  case GTU:  case UNGT:
8245    case LT:  case LTU:  case UNLT:
8246    case GE:  case GEU:  case UNGE:
8247    case LE:  case LEU:  case UNLE:
8248    case UNORDERED: case ORDERED:
8249
8250      /* If this produces an integer result, we know which bits are set.
8251	 Code here used to clear bits outside the mode of X, but that is
8252	 now done above.  */
8253
8254      if (GET_MODE_CLASS (mode) == MODE_INT
8255	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8256	nonzero = STORE_FLAG_VALUE;
8257      break;
8258
8259    case NEG:
8260#if 0
8261      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8262	 and num_sign_bit_copies.  */
8263      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8264	  == GET_MODE_BITSIZE (GET_MODE (x)))
8265	nonzero = 1;
8266#endif
8267
8268      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8269	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8270      break;
8271
8272    case ABS:
8273#if 0
8274      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8275	 and num_sign_bit_copies.  */
8276      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8277	  == GET_MODE_BITSIZE (GET_MODE (x)))
8278	nonzero = 1;
8279#endif
8280      break;
8281
8282    case TRUNCATE:
8283      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8284		  & GET_MODE_MASK (mode));
8285      break;
8286
8287    case ZERO_EXTEND:
8288      nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode);
8289      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8290	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8291      break;
8292
8293    case SIGN_EXTEND:
8294      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8295	 Otherwise, show that all the bits in the outer mode but not in the
8296	 inner mode may be nonzero.  */
8297      inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode);
8298      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8299	{
8300	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8301	  if (inner_nz
8302	      & (((HOST_WIDE_INT) 1
8303		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8304	    inner_nz |= (GET_MODE_MASK (mode)
8305			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8306	}
8307
8308      nonzero &= inner_nz;
8309      break;
8310
8311    case AND:
8312      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8313		  & nonzero_bits_with_known (XEXP (x, 1), mode));
8314      break;
8315
8316    case XOR:   case IOR:
8317    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8318      {
8319	unsigned HOST_WIDE_INT nonzero0 =
8320	  nonzero_bits_with_known (XEXP (x, 0), mode);
8321
8322	/* Don't call nonzero_bits for the second time if it cannot change
8323	   anything.  */
8324	if ((nonzero & nonzero0) != nonzero)
8325	  nonzero &= (nonzero0
8326		      | nonzero_bits_with_known (XEXP (x, 1), mode));
8327      }
8328      break;
8329
8330    case PLUS:  case MINUS:
8331    case MULT:
8332    case DIV:   case UDIV:
8333    case MOD:   case UMOD:
8334      /* We can apply the rules of arithmetic to compute the number of
8335	 high- and low-order zero bits of these operations.  We start by
8336	 computing the width (position of the highest-order nonzero bit)
8337	 and the number of low-order zero bits for each value.  */
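      /* Worked example for PLUS: if NZ0 == 0x0c (width 4, 2 low zeros)
	 and NZ1 == 0x30 (width 6, 4 low zeros), then RESULT_WIDTH is
	 MAX (4, 6) + 1 == 7 and RESULT_LOW is MIN (2, 4) == 2, so the
	 result's nonzero bits are confined to 0x7c.  */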
8338      {
8339	unsigned HOST_WIDE_INT nz0 =
8340	  nonzero_bits_with_known (XEXP (x, 0), mode);
8341	unsigned HOST_WIDE_INT nz1 =
8342	  nonzero_bits_with_known (XEXP (x, 1), mode);
8343	int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
8344	int width0 = floor_log2 (nz0) + 1;
8345	int width1 = floor_log2 (nz1) + 1;
8346	int low0 = floor_log2 (nz0 & -nz0);
8347	int low1 = floor_log2 (nz1 & -nz1);
8348	HOST_WIDE_INT op0_maybe_minusp
8349	  = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
8350	HOST_WIDE_INT op1_maybe_minusp
8351	  = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
8352	unsigned int result_width = mode_width;
8353	int result_low = 0;
8354
8355	switch (code)
8356	  {
8357	  case PLUS:
8358	    result_width = MAX (width0, width1) + 1;
8359	    result_low = MIN (low0, low1);
8360	    break;
8361	  case MINUS:
8362	    result_low = MIN (low0, low1);
8363	    break;
8364	  case MULT:
8365	    result_width = width0 + width1;
8366	    result_low = low0 + low1;
8367	    break;
8368	  case DIV:
8369	    if (width1 == 0)
8370	      break;
8371	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8372	      result_width = width0;
8373	    break;
8374	  case UDIV:
8375	    if (width1 == 0)
8376	      break;
8377	    result_width = width0;
8378	    break;
8379	  case MOD:
8380	    if (width1 == 0)
8381	      break;
8382	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8383	      result_width = MIN (width0, width1);
8384	    result_low = MIN (low0, low1);
8385	    break;
8386	  case UMOD:
8387	    if (width1 == 0)
8388	      break;
8389	    result_width = MIN (width0, width1);
8390	    result_low = MIN (low0, low1);
8391	    break;
8392	  default:
8393	    abort ();
8394	  }
8395
8396	if (result_width < mode_width)
8397	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8398
8399	if (result_low > 0)
8400	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8401
8402#ifdef POINTERS_EXTEND_UNSIGNED
8403	/* If pointers extend unsigned and this is an addition or subtraction
8404	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8405	   zero.  */
8406	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8407	    && (code == PLUS || code == MINUS)
8408	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8409	  nonzero &= GET_MODE_MASK (ptr_mode);
8410#endif
8411      }
8412      break;
8413
8414    case ZERO_EXTRACT:
8415      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8416	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8417	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8418      break;
8419
8420    case SUBREG:
8421      /* If this is a SUBREG formed for a promoted variable that has
8422	 been zero-extended, we know that at least the high-order bits
8423	 are zero, though others might be too.  */
8424
8425      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8426	nonzero = (GET_MODE_MASK (GET_MODE (x))
8427		   & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x)));
8428
8429      /* If the inner mode is a single word for both the host and target
8430	 machines, we can compute this from which bits of the inner
8431	 object might be nonzero.  */
8432      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8433	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8434	      <= HOST_BITS_PER_WIDE_INT))
8435	{
8436	  nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode);
8437
8438#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8439	  /* If this is a typical RISC machine, we only have to worry
8440	     about the way loads are extended.  */
8441	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8442	       ? (((nonzero
8443		    & (((unsigned HOST_WIDE_INT) 1
8444			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8445		   != 0))
8446	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8447	      || GET_CODE (SUBREG_REG (x)) != MEM)
8448#endif
8449	    {
8450	      /* On many CISC machines, accessing an object in a wider mode
8451		 causes the high-order bits to become undefined.  So they are
8452		 not known to be zero.  */
8453	      if (GET_MODE_SIZE (GET_MODE (x))
8454		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8455		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8456			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8457	    }
8458	}
8459      break;
8460
8461    case ASHIFTRT:
8462    case LSHIFTRT:
8463    case ASHIFT:
8464    case ROTATE:
8465      /* The nonzero bits are in two classes: any bits within MODE
8466	 that aren't in GET_MODE (x) are always significant.  The rest of the
8467	 nonzero bits are those that are significant in the operand of
8468	 the shift when shifted the appropriate number of bits.  This
8469	 shows that high-order bits are cleared by the right shift and
8470	 low-order bits by left shifts.  */
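      /* E.g. in QImode, (lshiftrt X (const_int 3)) with all eight bits of
	 X possibly nonzero leaves only 0x1f; for ASHIFTRT, if the sign
	 bit may be nonzero, the bits it is copied into are marked
	 possibly nonzero as well.  */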
8471      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8472	  && INTVAL (XEXP (x, 1)) >= 0
8473	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8474	{
8475	  enum machine_mode inner_mode = GET_MODE (x);
8476	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8477	  int count = INTVAL (XEXP (x, 1));
8478	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8479	  unsigned HOST_WIDE_INT op_nonzero =
8480	    nonzero_bits_with_known (XEXP (x, 0), mode);
8481	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8482	  unsigned HOST_WIDE_INT outer = 0;
8483
8484	  if (mode_width > width)
8485	    outer = (op_nonzero & nonzero & ~mode_mask);
8486
8487	  if (code == LSHIFTRT)
8488	    inner >>= count;
8489	  else if (code == ASHIFTRT)
8490	    {
8491	      inner >>= count;
8492
8493	      /* If the sign bit may have been nonzero before the shift, we
8494		 need to mark all the places it could have been copied to
8495		 by the shift as possibly nonzero.  */
8496	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8497		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8498	    }
8499	  else if (code == ASHIFT)
8500	    inner <<= count;
8501	  else
8502	    inner = ((inner << (count % width)
8503		      | (inner >> (width - (count % width)))) & mode_mask);
8504
8505	  nonzero &= (outer | inner);
8506	}
8507      break;
8508
8509    case FFS:
8510    case POPCOUNT:
8511      /* This is at most the number of bits in the mode.  */
8512      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
8513      break;
8514
8515    case CLZ:
8516      /* If CLZ has a known value at zero, then the nonzero bits are
8517	 that value, plus the number of bits in the mode minus one.  */
8518      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8519	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8520      else
8521	nonzero = -1;
8522      break;
8523
8524    case CTZ:
8525      /* If CTZ has a known value at zero, then the nonzero bits are
8526	 that value, plus the number of bits in the mode minus one.  */
8527      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
8528	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
8529      else
8530	nonzero = -1;
8531      break;
8532
8533    case PARITY:
8534      nonzero = 1;
8535      break;
8536
8537    case IF_THEN_ELSE:
8538      nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode)
8539		  | nonzero_bits_with_known (XEXP (x, 2), mode));
8540      break;
8541
8542    default:
8543      break;
8544    }
8545
8546  return nonzero;
8547}
8548
8549/* See the macro definition above.  */
8550#undef cached_num_sign_bit_copies
8551
8552#define num_sign_bit_copies_with_known(X, M) \
8553  cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret)
8554
8555/* The function cached_num_sign_bit_copies is a wrapper around
8556   num_sign_bit_copies1.  It avoids exponential behavior in
8557   num_sign_bit_copies1 when X has identical subexpressions on the
8558   first or the second level.  */
8559
8560static unsigned int
8561cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
8562			    enum machine_mode known_mode,
8563			    unsigned int known_ret)
8564{
8565  if (x == known_x && mode == known_mode)
8566    return known_ret;
8567
8568  /* Try to find identical subexpressions.  If found call
8569     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
8570     the precomputed value for the subexpression as KNOWN_RET.  */
8571
8572  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8573      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8574    {
8575      rtx x0 = XEXP (x, 0);
8576      rtx x1 = XEXP (x, 1);
8577
8578      /* Check the first level.  */
8579      if (x0 == x1)
8580	return
8581	  num_sign_bit_copies1 (x, mode, x0, mode,
8582				num_sign_bit_copies_with_known (x0, mode));
8583
8584      /* Check the second level.  */
8585      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8586	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8587	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8588	return
8589	  num_sign_bit_copies1 (x, mode, x1, mode,
8590				num_sign_bit_copies_with_known (x1, mode));
8591
8592      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8593	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8594	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8595	return
8596	  num_sign_bit_copies1 (x, mode, x0, mode,
8597				num_sign_bit_copies_with_known (x0, mode));
8598    }
8599
8600  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
8601}
8602
8603/* Return the number of bits at the high-order end of X that are known to
8604   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8605   VOIDmode, X will be used in its own mode.  The returned value will always
8606   be between 1 and the number of bits in MODE.  */
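
/* For example, in SImode (const_int -1) yields 32, while
   (sign_extend:SI (reg:QI R)) yields at least 32 - 8 + 1 == 25.  */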
8607
8608static unsigned int
8609num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
8610		      enum machine_mode known_mode,
8611		      unsigned int known_ret)
8612{
8613  enum rtx_code code = GET_CODE (x);
8614  unsigned int bitwidth;
8615  int num0, num1, result;
8616  unsigned HOST_WIDE_INT nonzero;
8617  rtx tem;
8618
8619  /* If we weren't given a mode, use the mode of X.  If the mode is still
8620     VOIDmode, we don't know anything.  Likewise if one of the modes is
8621     floating-point.  */
8622
8623  if (mode == VOIDmode)
8624    mode = GET_MODE (x);
8625
8626  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8627    return 1;
8628
8629  bitwidth = GET_MODE_BITSIZE (mode);
8630
8631  /* For a smaller object, just ignore the high bits.  */
8632  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8633    {
8634      num0 = num_sign_bit_copies_with_known (x, GET_MODE (x));
8635      return MAX (1,
8636		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8637    }
8638
8639  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8640    {
8641#ifndef WORD_REGISTER_OPERATIONS
8642      /* If this machine does not do all register operations on the entire
8643	 register and MODE is wider than the mode of X, we can say nothing
8644	 at all about the high-order bits.  */
8645      return 1;
8646#else
8647      /* Likewise on machines that do, if the mode of the object is smaller
8648	 than a word and loads of that size don't sign extend, we can say
8649	 nothing about the high order bits.  */
8650      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8651#ifdef LOAD_EXTEND_OP
8652	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8653#endif
8654	  )
8655	return 1;
8656#endif
8657    }
8658
8659  switch (code)
8660    {
8661    case REG:
8662
8663#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8664      /* If pointers extend signed and this is a pointer in Pmode, say that
8665	 all the bits above ptr_mode are known to be sign bit copies.  */
8666      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8667	  && REG_POINTER (x))
8668	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8669#endif
8670
8671      if (reg_last_set_value[REGNO (x)] != 0
8672	  && reg_last_set_mode[REGNO (x)] == mode
8673	  && (reg_last_set_label[REGNO (x)] == label_tick
8674	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8675		  && REG_N_SETS (REGNO (x)) == 1
8676		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8677					REGNO (x))))
8678	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8679	return reg_last_set_sign_bit_copies[REGNO (x)];
8680
8681      tem = get_last_value (x);
8682      if (tem != 0)
8683	return num_sign_bit_copies_with_known (tem, mode);
8684
8685      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8686	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8687	return reg_sign_bit_copies[REGNO (x)];
8688      break;
8689
8690    case MEM:
8691#ifdef LOAD_EXTEND_OP
8692      /* Some RISC machines sign-extend all loads of smaller than a word.  */
8693      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8694	return MAX (1, ((int) bitwidth
8695			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8696#endif
8697      break;
8698
8699    case CONST_INT:
8700      /* If the constant is negative, take its 1's complement and remask.
8701	 Then see how many zero bits we have.  */
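      /* E.g. in SImode, -4 is ...11111100; its complement under the mode
	 mask is 3, so we return 32 - floor_log2 (3) - 1 == 30 copies.  */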
8702      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8703      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8704	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8705	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8706
8707      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8708
8709    case SUBREG:
8710      /* If this is a SUBREG for a promoted object that is sign-extended
8711	 and we are looking at it in a wider mode, we know that at least the
8712	 high-order bits are known to be sign bit copies.  */
8713
8714      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8715	{
8716	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8717	  return MAX ((int) bitwidth
8718		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8719		      num0);
8720	}
8721
8722      /* For a smaller object, just ignore the high bits.  */
8723      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8724	{
8725	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode);
8726	  return MAX (1, (num0
8727			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8728				   - bitwidth)));
8729	}
8730
8731#ifdef WORD_REGISTER_OPERATIONS
8732#ifdef LOAD_EXTEND_OP
8733      /* For paradoxical SUBREGs on machines where all register operations
8734	 affect the entire register, just look inside.  Note that we are
8735	 passing MODE to the recursive call, so the number of sign bit copies
8736	 will remain relative to that mode, not the inner mode.  */
8737
8738      /* This works only if loads sign extend.  Otherwise, if we get a
8739	 reload for the inner part, it may be loaded from the stack, and
8740	 then we lose all sign bit copies that existed before the store
8741	 to the stack.  */
8742
8743      if ((GET_MODE_SIZE (GET_MODE (x))
8744	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8745	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8746	  && GET_CODE (SUBREG_REG (x)) == MEM)
8747	return num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8748#endif
8749#endif
8750      break;
8751
8752    case SIGN_EXTRACT:
8753      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8754	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8755      break;
8756
8757    case SIGN_EXTEND:
8758      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8759	      + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode));
8760
8761    case TRUNCATE:
8762      /* For a smaller object, just ignore the high bits.  */
8763      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode);
8764      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8765				    - bitwidth)));
8766
8767    case NOT:
8768      return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8769
8770    case ROTATE:       case ROTATERT:
8771      /* If we are rotating left by a number of bits less than the number
8772	 of sign bit copies, we can just subtract that amount from the
8773	 number.  */
8774      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8775	  && INTVAL (XEXP (x, 1)) >= 0
8776	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8777	{
8778	  num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8779	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8780				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8781	}
8782      break;
8783
8784    case NEG:
8785      /* In general, this subtracts one sign bit copy.  But if the value
8786	 is known to be positive, the number of sign bit copies is the
8787	 same as that of the input.  Finally, if the input has just one bit
8788	 that might be nonzero, all the bits are copies of the sign bit.  */
8789      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8790      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8791	return num0 > 1 ? num0 - 1 : 1;
8792
8793      nonzero = nonzero_bits (XEXP (x, 0), mode);
8794      if (nonzero == 1)
8795	return bitwidth;
8796
8797      if (num0 > 1
8798	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8799	num0--;
8800
8801      return num0;
8802
8803    case IOR:   case AND:   case XOR:
8804    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8805      /* Logical operations will preserve the number of sign-bit copies.
8806	 MIN and MAX operations always return one of the operands.  */
8807      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8808      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8809      return MIN (num0, num1);
8810
8811    case PLUS:  case MINUS:
8812      /* For addition and subtraction, we can have a 1-bit carry.  However,
8813	 if we are subtracting 1 from a positive number, there will not
8814	 be such a carry.  Furthermore, if the positive number is known to
8815	 be 0 or 1, we know the result is either -1 or 0.  */
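      /* E.g. if nonzero_bits of the first operand is 1, adding -1 gives
	 either -1 or 0, so all BITWIDTH bits are sign bit copies.  */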
8816
8817      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8818	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8819	{
8820	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8821	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8822	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8823		    : bitwidth - floor_log2 (nonzero) - 1);
8824	}
8825
8826      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8827      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8828      result = MAX (1, MIN (num0, num1) - 1);
8829
8830#ifdef POINTERS_EXTEND_UNSIGNED
8831      /* If pointers extend signed and this is an addition or subtraction
8832	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8833	 sign bit copies.  */
8834      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8835	  && (code == PLUS || code == MINUS)
8836	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8837	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8838			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8839		      result);
8840#endif
8841      return result;
8842
8843    case MULT:
8844      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
8846	 to be positive, we must allow for an additional bit since negating
8847	 a negative number can remove one sign bit copy.  */
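      /* Illustrative example: in QImode, X in [-4, 3] has 6 sign bit
	 copies and Y in [-8, 7] has 5, giving 8 - 2 - 3 = 3; one copy is
	 then subtracted for products such as -4 * -8 = 32, which has
	 only 2 sign bit copies.  */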
8848
8849      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8850      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8851
8852      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8853      if (result > 0
8854	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8855	      || (((nonzero_bits (XEXP (x, 0), mode)
8856		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8857		  && ((nonzero_bits (XEXP (x, 1), mode)
8858		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8859	result--;
8860
8861      return MAX (1, result);
8862
8863    case UDIV:
8864      /* The result must be <= the first operand.  If the first operand
8865         has the high bit set, we know nothing about the number of sign
8866         bit copies.  */
8867      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8868	return 1;
8869      else if ((nonzero_bits (XEXP (x, 0), mode)
8870		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8871	return 1;
8872      else
8873	return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8874
8875    case UMOD:
8876      /* The result must be <= the second operand.  */
8877      return num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8878
8879    case DIV:
8880      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case the
	 quotient is negated and may lose one sign bit copy.  */
8883      result = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8884      if (result > 1
8885	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8886	      || (nonzero_bits (XEXP (x, 1), mode)
8887		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8888	result--;
8889
8890      return result;
8891
8892    case MOD:
8893      result = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8894      if (result > 1
8895	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8896	      || (nonzero_bits (XEXP (x, 1), mode)
8897		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8898	result--;
8899
8900      return result;
8901
8902    case ASHIFTRT:
8903      /* Shifts by a constant add to the number of bits equal to the
8904	 sign bit.  */
8905      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8906      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8907	  && INTVAL (XEXP (x, 1)) > 0)
8908	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8909
8910      return num0;
8911
8912    case ASHIFT:
8913      /* Left shifts destroy copies.  */
8914      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8915	  || INTVAL (XEXP (x, 1)) < 0
8916	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8917	return 1;
8918
8919      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8920      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8921
8922    case IF_THEN_ELSE:
8923      num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8924      num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode);
8925      return MIN (num0, num1);
8926
8927    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8928    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
8929    case GEU: case GTU: case LEU: case LTU:
8930    case UNORDERED: case ORDERED:
      /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
8932	 Then see how many zero bits we have.  */
8933      nonzero = STORE_FLAG_VALUE;
8934      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8935	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8936	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8937
8938      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8940
8941    default:
8942      break;
8943    }
8944
8945  /* If we haven't been able to figure it out by one of the above rules,
8946     see if some of the high-order bits are known to be zero.  If so,
8947     count those bits and return one less than that amount.  If we can't
8948     safely compute the mask for this mode, always return BITWIDTH.  */
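  /* Illustrative example: if nonzero_bits returns 0xff for an SImode
     value, bits 31..8 are known to be zero, floor_log2 (0xff) is 7, and
     we report 32 - 7 - 1 = 24 sign bit copies.  */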
8949
8950  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8951    return 1;
8952
8953  nonzero = nonzero_bits (x, mode);
8954  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8955	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8956}
8957
8958/* Return the number of "extended" bits there are in X, when interpreted
8959   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8960   unsigned quantities, this is the number of high-order zero bits.
8961   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
8963   bits.  For example, if two quantities for which this function returns
8964   at least 1 are added, the addition is known not to overflow.
8965
8966   This function will always return 0 unless called during combine, which
8967   implies that it must be called from a define_split.  */
8968
8969unsigned int
8970extended_count (rtx x, enum machine_mode mode, int unsignedp)
8971{
8972  if (nonzero_sign_valid == 0)
8973    return 0;
8974
8975  return (unsignedp
8976	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8977	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8978			       - floor_log2 (nonzero_bits (x, mode)))
8979	     : 0)
8980	  : num_sign_bit_copies (x, mode) - 1);
8981}
8982
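/* Illustrative example: for a QImode value zero-extended into SImode,
   nonzero_bits is 0xff, so the unsigned count is 32 - 1 - 7 = 24 spare
   bits; adding two such quantities therefore cannot overflow SImode.  */
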
8983/* This function is called from `simplify_shift_const' to merge two
8984   outer operations.  Specifically, we have already found that we need
8985   to perform operation *POP0 with constant *PCONST0 at the outermost
8986   position.  We would now like to also perform OP1 with constant CONST1
8987   (with *POP0 being done last).
8988
8989   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8990   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8991   complement the innermost operand, otherwise it is unchanged.
8992
8993   MODE is the mode in which the operation will be done.  No bits outside
8994   the width of this mode matter.  It is assumed that the width of this mode
8995   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8996
   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8998   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8999   result is simply *PCONST0.
9000
9001   If the resulting operation cannot be expressed as one operation, we
9002   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
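
/* Illustrative example: with *POP0 == AND, *PCONST0 == 0xf0 and
   OP1 == AND, CONST1 == 0x3c, the two masks merge into a single AND
   whose constant is 0xf0 & 0x3c == 0x30.  */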
9003
9004static int
9005merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
9006{
9007  enum rtx_code op0 = *pop0;
9008  HOST_WIDE_INT const0 = *pconst0;
9009
9010  const0 &= GET_MODE_MASK (mode);
9011  const1 &= GET_MODE_MASK (mode);
9012
9013  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9014  if (op0 == AND)
9015    const1 &= const0;
9016
9017  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
9018     if OP0 is SET.  */
9019
9020  if (op1 == NIL || op0 == SET)
9021    return 1;
9022
9023  else if (op0 == NIL)
9024    op0 = op1, const0 = const1;
9025
9026  else if (op0 == op1)
9027    {
9028      switch (op0)
9029	{
9030	case AND:
9031	  const0 &= const1;
9032	  break;
9033	case IOR:
9034	  const0 |= const1;
9035	  break;
9036	case XOR:
9037	  const0 ^= const1;
9038	  break;
9039	case PLUS:
9040	  const0 += const1;
9041	  break;
9042	case NEG:
9043	  op0 = NIL;
9044	  break;
9045	default:
9046	  break;
9047	}
9048    }
9049
9050  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9051  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9052    return 0;
9053
9054  /* If the two constants aren't the same, we can't do anything.  The
9055     remaining six cases can all be done.  */
9056  else if (const0 != const1)
9057    return 0;
9058
9059  else
9060    switch (op0)
9061      {
9062      case IOR:
9063	if (op1 == AND)
9064	  /* (a & b) | b == b */
9065	  op0 = SET;
9066	else /* op1 == XOR */
9067	  /* (a ^ b) | b == a | b */
9068	  {;}
9069	break;
9070
9071      case XOR:
9072	if (op1 == AND)
9073	  /* (a & b) ^ b == (~a) & b */
9074	  op0 = AND, *pcomp_p = 1;
9075	else /* op1 == IOR */
9076	  /* (a | b) ^ b == a & ~b */
9077	  op0 = AND, const0 = ~const0;
9078	break;
9079
9080      case AND:
9081	if (op1 == IOR)
9082	  /* (a | b) & b == b */
	  op0 = SET;
9084	else /* op1 == XOR */
	  /* (a ^ b) & b == (~a) & b */
9086	  *pcomp_p = 1;
9087	break;
9088      default:
9089	break;
9090      }
9091
9092  /* Check for NO-OP cases.  */
9093  const0 &= GET_MODE_MASK (mode);
9094  if (const0 == 0
9095      && (op0 == IOR || op0 == XOR || op0 == PLUS))
9096    op0 = NIL;
9097  else if (const0 == 0 && op0 == AND)
9098    op0 = SET;
9099  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9100	   && op0 == AND)
9101    op0 = NIL;
9102
9103  /* ??? Slightly redundant with the above mask, but not entirely.
9104     Moving this above means we'd have to sign-extend the mode mask
9105     for the final test.  */
9106  const0 = trunc_int_for_mode (const0, mode);
9107
9108  *pop0 = op0;
9109  *pconst0 = const0;
9110
9111  return 1;
9112}
9113
9114/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9115   The result of the shift is RESULT_MODE.  X, if nonzero, is an expression
9116   that we started with.
9117
9118   The shift is normally computed in the widest mode we find in VAROP, as
9119   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
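
/* Illustrative example: (ashift:SI (lshiftrt:SI X 4) 4) is simplified
   here to (and:SI X (const_int -16)); the opposite shift counts cancel
   and the mask 0xfffffff0 is recorded via merge_outer_ops and applied
   afterwards as an outer AND.  */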
9121
9122static rtx
9123simplify_shift_const (rtx x, enum rtx_code code,
9124		      enum machine_mode result_mode, rtx varop,
9125		      int orig_count)
9126{
9127  enum rtx_code orig_code = code;
9128  unsigned int count;
9129  int signed_count;
9130  enum machine_mode mode = result_mode;
9131  enum machine_mode shift_mode, tmode;
9132  unsigned int mode_words
9133    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9134  /* We form (outer_op (code varop count) (outer_const)).  */
9135  enum rtx_code outer_op = NIL;
9136  HOST_WIDE_INT outer_const = 0;
9137  rtx const_rtx;
9138  int complement_p = 0;
9139  rtx new;
9140
  /* Make sure to truncate the "natural" shift on the way in.  We don't
9142     want to do this inside the loop as it makes it more difficult to
9143     combine shifts.  */
9144  if (SHIFT_COUNT_TRUNCATED)
9145    orig_count &= GET_MODE_BITSIZE (mode) - 1;
9146
9147  /* If we were given an invalid count, don't do anything except exactly
9148     what was requested.  */
9149
9150  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9151    {
9152      if (x)
9153	return x;
9154
9155      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9156    }
9157
9158  count = orig_count;
9159
9160  /* Unless one of the branches of the `if' in this loop does a `continue',
9161     we will `break' the loop after the `if'.  */
9162
9163  while (count != 0)
9164    {
9165      /* If we have an operand of (clobber (const_int 0)), just return that
9166	 value.  */
9167      if (GET_CODE (varop) == CLOBBER)
9168	return varop;
9169
9170      /* If we discovered we had to complement VAROP, leave.  Making a NOT
9171	 here would cause an infinite loop.  */
9172      if (complement_p)
9173	break;
9174
9175      /* Convert ROTATERT to ROTATE.  */
9176      if (code == ROTATERT)
9177	{
	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9179	  code = ROTATE;
9180	  if (VECTOR_MODE_P (result_mode))
9181	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9182	  else
9183	    count = bitsize - count;
9184	}
9185
9186      /* We need to determine what mode we will do the shift in.  If the
9187	 shift is a right shift or a ROTATE, we must always do it in the mode
9188	 it was originally done in.  Otherwise, we can do it in MODE, the
9189	 widest mode encountered.  */
9190      shift_mode
9191	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9192	   ? result_mode : mode);
9193
9194      /* Handle cases where the count is greater than the size of the mode
9195	 minus 1.  For ASHIFT, use the size minus one as the count (this can
9196	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9197	 take the count modulo the size.  For other shifts, the result is
9198	 zero.
9199
9200	 Since these shifts are being produced by the compiler by combining
	 multiple operations, each of which is defined, we know what the
9202	 result is supposed to be.  */
9203
9204      if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9205	{
9206	  if (code == ASHIFTRT)
9207	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9208	  else if (code == ROTATE || code == ROTATERT)
9209	    count %= GET_MODE_BITSIZE (shift_mode);
9210	  else
9211	    {
9212	      /* We can't simply return zero because there may be an
9213		 outer op.  */
9214	      varop = const0_rtx;
9215	      count = 0;
9216	      break;
9217	    }
9218	}
9219
9220      /* An arithmetic right shift of a quantity known to be -1 or 0
9221	 is a no-op.  */
9222      if (code == ASHIFTRT
9223	  && (num_sign_bit_copies (varop, shift_mode)
9224	      == GET_MODE_BITSIZE (shift_mode)))
9225	{
9226	  count = 0;
9227	  break;
9228	}
9229
9230      /* If we are doing an arithmetic right shift and discarding all but
9231	 the sign bit copies, this is equivalent to doing a shift by the
9232	 bitsize minus one.  Convert it into that shift because it will often
9233	 allow other simplifications.  */
9234
9235      if (code == ASHIFTRT
9236	  && (count + num_sign_bit_copies (varop, shift_mode)
9237	      >= GET_MODE_BITSIZE (shift_mode)))
9238	count = GET_MODE_BITSIZE (shift_mode) - 1;
9239
9240      /* We simplify the tests below and elsewhere by converting
9241	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9242	 `make_compound_operation' will convert it to an ASHIFTRT for
9243	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9244      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9245	  && code == ASHIFTRT
9246	  && ((nonzero_bits (varop, shift_mode)
9247	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9248	      == 0))
9249	code = LSHIFTRT;
9250
9251      if (code == LSHIFTRT
9252	  && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9253	  && !(nonzero_bits (varop, shift_mode) >> count))
9254	varop = const0_rtx;
9255      if (code == ASHIFT
9256	  && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9257	  && !((nonzero_bits (varop, shift_mode) << count)
9258	       & GET_MODE_MASK (shift_mode)))
9259	varop = const0_rtx;
9260
9261      switch (GET_CODE (varop))
9262	{
9263	case SIGN_EXTEND:
9264	case ZERO_EXTEND:
9265	case SIGN_EXTRACT:
9266	case ZERO_EXTRACT:
9267	  new = expand_compound_operation (varop);
9268	  if (new != varop)
9269	    {
9270	      varop = new;
9271	      continue;
9272	    }
9273	  break;
9274
9275	case MEM:
9276	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9277	     minus the width of a smaller mode, we can do this with a
9278	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9279	  if ((code == ASHIFTRT || code == LSHIFTRT)
9280	      && ! mode_dependent_address_p (XEXP (varop, 0))
9281	      && ! MEM_VOLATILE_P (varop)
9282	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9283					 MODE_INT, 1)) != BLKmode)
9284	    {
9285	      new = adjust_address_nv (varop, tmode,
9286				       BYTES_BIG_ENDIAN ? 0
9287				       : count / BITS_PER_UNIT);
9288
9289	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9290				     : ZERO_EXTEND, mode, new);
9291	      count = 0;
9292	      continue;
9293	    }
9294	  break;
9295
9296	case USE:
	  /* Similar to the case above, except that we can only do this if
	     the resulting mode is the same as that of the underlying
	     MEM; here we adjust the address depending on the *bits*
	     endianness because of the way that bit-field extract insns
	     are defined.  */
9301	  if ((code == ASHIFTRT || code == LSHIFTRT)
9302	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9303					 MODE_INT, 1)) != BLKmode
9304	      && tmode == GET_MODE (XEXP (varop, 0)))
9305	    {
9306	      if (BITS_BIG_ENDIAN)
9307		new = XEXP (varop, 0);
9308	      else
9309		{
9310		  new = copy_rtx (XEXP (varop, 0));
9311		  SUBST (XEXP (new, 0),
9312			 plus_constant (XEXP (new, 0),
9313					count / BITS_PER_UNIT));
9314		}
9315
9316	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9317				     : ZERO_EXTEND, mode, new);
9318	      count = 0;
9319	      continue;
9320	    }
9321	  break;
9322
9323	case SUBREG:
9324	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9325	     the same number of words as what we've seen so far.  Then store
9326	     the widest mode in MODE.  */
9327	  if (subreg_lowpart_p (varop)
9328	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9329		  > GET_MODE_SIZE (GET_MODE (varop)))
9330	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9331				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9332		 == mode_words)
9333	    {
9334	      varop = SUBREG_REG (varop);
9335	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9336		mode = GET_MODE (varop);
9337	      continue;
9338	    }
9339	  break;
9340
9341	case MULT:
9342	  /* Some machines use MULT instead of ASHIFT because MULT
9343	     is cheaper.  But it is still better on those machines to
9344	     merge two shifts into one.  */
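	  /* Illustrative example: (lshiftrt (mult X 8) 2) is rewritten
	     here as (lshiftrt (ashift X 3) 2), since exact_log2 (8) == 3,
	     and the two shifts can then be merged.  */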
9345	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9346	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9347	    {
9348	      varop
9349		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9350			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9351	      continue;
9352	    }
9353	  break;
9354
9355	case UDIV:
9356	  /* Similar, for when divides are cheaper.  */
9357	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9358	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9359	    {
9360	      varop
9361		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9362			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9363	      continue;
9364	    }
9365	  break;
9366
9367	case ASHIFTRT:
9368	  /* If we are extracting just the sign bit of an arithmetic
9369	     right shift, that shift is not needed.  However, the sign
9370	     bit of a wider mode may be different from what would be
9371	     interpreted as the sign bit in a narrower mode, so, if
9372	     the result is narrower, don't discard the shift.  */
9373	  if (code == LSHIFTRT
9374	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9375	      && (GET_MODE_BITSIZE (result_mode)
9376		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9377	    {
9378	      varop = XEXP (varop, 0);
9379	      continue;
9380	    }
9381
9382	  /* ... fall through ...  */
9383
9384	case LSHIFTRT:
9385	case ASHIFT:
9386	case ROTATE:
9387	  /* Here we have two nested shifts.  The result is usually the
9388	     AND of a new shift with a mask.  We compute the result below.  */
9389	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9390	      && INTVAL (XEXP (varop, 1)) >= 0
9391	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9392	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9393	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9394	    {
9395	      enum rtx_code first_code = GET_CODE (varop);
9396	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9397	      unsigned HOST_WIDE_INT mask;
9398	      rtx mask_rtx;
9399
9400	      /* We have one common special case.  We can't do any merging if
9401		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9402		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9403		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9404		 we can convert it to
		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9406		 This simplifies certain SIGN_EXTEND operations.  */
9407	      if (code == ASHIFT && first_code == ASHIFTRT
9408		  && count == (unsigned int)
9409			      (GET_MODE_BITSIZE (result_mode)
9410			       - GET_MODE_BITSIZE (GET_MODE (varop))))
9411		{
9412		  /* C3 has the low-order C1 bits zero.  */
9413
9414		  mask = (GET_MODE_MASK (mode)
9415			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9416
9417		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9418						  XEXP (varop, 0), mask);
9419		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9420						varop, count);
9421		  count = first_count;
9422		  code = ASHIFTRT;
9423		  continue;
9424		}
9425
9426	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9427		 than C1 high-order bits equal to the sign bit, we can convert
9428		 this to either an ASHIFT or an ASHIFTRT depending on the
9429		 two counts.
9430
9431		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9432
9433	      if (code == ASHIFTRT && first_code == ASHIFT
9434		  && GET_MODE (varop) == shift_mode
9435		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9436		      > first_count))
9437		{
9438		  varop = XEXP (varop, 0);
9439
9440		  signed_count = count - first_count;
9441		  if (signed_count < 0)
9442		    count = -signed_count, code = ASHIFT;
9443		  else
9444		    count = signed_count;
9445
9446		  continue;
9447		}
9448
9449	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9450		 we can only do this if FIRST_CODE is also ASHIFTRT.
9451
9452		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9453		 ASHIFTRT.
9454
9455		 If the mode of this shift is not the mode of the outer shift,
9456		 we can't do this if either shift is a right shift or ROTATE.
9457
9458		 Finally, we can't do any of these if the mode is too wide
9459		 unless the codes are the same.
9460
9461		 Handle the case where the shift codes are the same
9462		 first.  */
9463
9464	      if (code == first_code)
9465		{
9466		  if (GET_MODE (varop) != result_mode
9467		      && (code == ASHIFTRT || code == LSHIFTRT
9468			  || code == ROTATE))
9469		    break;
9470
9471		  count += first_count;
9472		  varop = XEXP (varop, 0);
9473		  continue;
9474		}
9475
9476	      if (code == ASHIFTRT
9477		  || (code == ROTATE && first_code == ASHIFTRT)
9478		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9479		  || (GET_MODE (varop) != result_mode
9480		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9481			  || first_code == ROTATE
9482			  || code == ROTATE)))
9483		break;
9484
9485	      /* To compute the mask to apply after the shift, shift the
9486		 nonzero bits of the inner shift the same way the
9487		 outer shift will.  */
9488
9489	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9490
9491	      mask_rtx
9492		= simplify_binary_operation (code, result_mode, mask_rtx,
9493					     GEN_INT (count));
9494
9495	      /* Give up if we can't compute an outer operation to use.  */
9496	      if (mask_rtx == 0
9497		  || GET_CODE (mask_rtx) != CONST_INT
9498		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9499					INTVAL (mask_rtx),
9500					result_mode, &complement_p))
9501		break;
9502
9503	      /* If the shifts are in the same direction, we add the
9504		 counts.  Otherwise, we subtract them.  */
9505	      signed_count = count;
9506	      if ((code == ASHIFTRT || code == LSHIFTRT)
9507		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9508		signed_count += first_count;
9509	      else
9510		signed_count -= first_count;
9511
9512	      /* If COUNT is positive, the new shift is usually CODE,
9513		 except for the two exceptions below, in which case it is
9514		 FIRST_CODE.  If the count is negative, FIRST_CODE should
		 always be used.  */
9516	      if (signed_count > 0
9517		  && ((first_code == ROTATE && code == ASHIFT)
9518		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9519		code = first_code, count = signed_count;
9520	      else if (signed_count < 0)
9521		code = first_code, count = -signed_count;
9522	      else
9523		count = signed_count;
9524
9525	      varop = XEXP (varop, 0);
9526	      continue;
9527	    }
9528
9529	  /* If we have (A << B << C) for any shift, we can convert this to
9530	     (A << C << B).  This wins if A is a constant.  Only try this if
9531	     B is not a constant.  */
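	  /* Illustrative example: (ashift (ashift (const_int 3) B) 2)
	     becomes (ashift (const_int 12) B), since 3 << 2 folds
	     to 12.  */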
9532
9533	  else if (GET_CODE (varop) == code
9534		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9535		   && 0 != (new
9536			    = simplify_binary_operation (code, mode,
9537							 XEXP (varop, 0),
9538							 GEN_INT (count))))
9539	    {
9540	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9541	      count = 0;
9542	      continue;
9543	    }
9544	  break;
9545
9546	case NOT:
9547	  /* Make this fit the case below.  */
9548	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9549			       GEN_INT (GET_MODE_MASK (mode)));
9550	  continue;
9551
9552	case IOR:
9553	case AND:
9554	case XOR:
9555	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9556	     with C the size of VAROP - 1 and the shift is logical if
9557	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9558	     we have an (le X 0) operation.   If we have an arithmetic shift
9559	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9560	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9561
9562	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9563	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9564	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9565	      && (code == LSHIFTRT || code == ASHIFTRT)
9566	      && count == (unsigned int)
9567			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9568	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9569	    {
9570	      count = 0;
9571	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9572				  const0_rtx);
9573
9574	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9575		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9576
9577	      continue;
9578	    }
9579
9580	  /* If we have (shift (logical)), move the logical to the outside
9581	     to allow it to possibly combine with another logical and the
9582	     shift to combine with another shift.  This also canonicalizes to
9583	     what a ZERO_EXTRACT looks like.  Also, some machines have
9584	     (and (shift)) insns.  */
9585
9586	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9587	      && (new = simplify_binary_operation (code, result_mode,
9588						   XEXP (varop, 1),
9589						   GEN_INT (count))) != 0
9590	      && GET_CODE (new) == CONST_INT
9591	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9592				  INTVAL (new), result_mode, &complement_p))
9593	    {
9594	      varop = XEXP (varop, 0);
9595	      continue;
9596	    }
9597
9598	  /* If we can't do that, try to simplify the shift in each arm of the
9599	     logical expression, make a new logical expression, and apply
9600	     the inverse distributive law.  */
9601	  {
9602	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9603					    XEXP (varop, 0), count);
9604	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9605					    XEXP (varop, 1), count);
9606
9607	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9608	    varop = apply_distributive_law (varop);
9609
9610	    count = 0;
9611	  }
9612	  break;
9613
9614	case EQ:
9615	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9616	     says that the sign bit can be tested, FOO has mode MODE, C is
9617	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9618	     that may be nonzero.  */
9619	  if (code == LSHIFTRT
9620	      && XEXP (varop, 1) == const0_rtx
9621	      && GET_MODE (XEXP (varop, 0)) == result_mode
9622	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9623	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9624	      && ((STORE_FLAG_VALUE
9625		   & ((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (result_mode) - 1))))
9627	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9628	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9629				  (HOST_WIDE_INT) 1, result_mode,
9630				  &complement_p))
9631	    {
9632	      varop = XEXP (varop, 0);
9633	      count = 0;
9634	      continue;
9635	    }
9636	  break;
9637
9638	case NEG:
	  /* If A is either 0 or 1 and C is one less than the number of
	     bits in the mode, (lshiftrt (neg A) C) is equivalent to A.  */
9641	  if (code == LSHIFTRT
9642	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9643	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9644	    {
9645	      varop = XEXP (varop, 0);
9646	      count = 0;
9647	      continue;
9648	    }
9649
9650	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9651	     NEG outside to allow shifts to combine.  */
9652	  if (code == ASHIFT
9653	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9654				  (HOST_WIDE_INT) 0, result_mode,
9655				  &complement_p))
9656	    {
9657	      varop = XEXP (varop, 0);
9658	      continue;
9659	    }
9660	  break;
9661
9662	case PLUS:
	  /* If A is either 0 or 1 and C is one less than the number of
	     bits in the mode, (lshiftrt (plus A -1) C) is equivalent
	     to (xor A 1).  */
9666	  if (code == LSHIFTRT
9667	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9668	      && XEXP (varop, 1) == constm1_rtx
9669	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9670	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9671				  (HOST_WIDE_INT) 1, result_mode,
9672				  &complement_p))
9673	    {
9674	      count = 0;
9675	      varop = XEXP (varop, 0);
9676	      continue;
9677	    }
9678
9679	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9680	     that might be nonzero in BAR are those being shifted out and those
9681	     bits are known zero in FOO, we can replace the PLUS with FOO.
9682	     Similarly in the other operand order.  This code occurs when
9683	     we are computing the size of a variable-size array.  */
9684
9685	  if ((code == ASHIFTRT || code == LSHIFTRT)
9686	      && count < HOST_BITS_PER_WIDE_INT
9687	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9688	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9689		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9690	    {
9691	      varop = XEXP (varop, 0);
9692	      continue;
9693	    }
9694	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9695		   && count < HOST_BITS_PER_WIDE_INT
9696		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9697		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9698			    >> count)
9699		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9700			    & nonzero_bits (XEXP (varop, 1),
9701						 result_mode)))
9702	    {
9703	      varop = XEXP (varop, 1);
9704	      continue;
9705	    }
9706
9707	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9708	  if (code == ASHIFT
9709	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9710	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9711						   XEXP (varop, 1),
9712						   GEN_INT (count))) != 0
9713	      && GET_CODE (new) == CONST_INT
9714	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9715				  INTVAL (new), result_mode, &complement_p))
9716	    {
9717	      varop = XEXP (varop, 0);
9718	      continue;
9719	    }
9720	  break;
9721
9722	case MINUS:
	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9724	     with C the size of VAROP - 1 and the shift is logical if
9725	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9726	     we have a (gt X 0) operation.  If the shift is arithmetic with
9727	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9728	     we have a (neg (gt X 0)) operation.  */
9729
9730	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9731	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9732	      && count == (unsigned int)
9733			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9734	      && (code == LSHIFTRT || code == ASHIFTRT)
9735	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9736	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9737		 == count
9738	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9739	    {
9740	      count = 0;
9741	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9742				  const0_rtx);
9743
9744	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9745		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9746
9747	      continue;
9748	    }
9749	  break;
9750
9751	case TRUNCATE:
9752	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9753	     if the truncate does not affect the value.  */
9754	  if (code == LSHIFTRT
9755	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9756	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9757	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9758		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9759		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9760	    {
9761	      rtx varop_inner = XEXP (varop, 0);
9762
9763	      varop_inner
9764		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9765				    XEXP (varop_inner, 0),
9766				    GEN_INT
9767				    (count + INTVAL (XEXP (varop_inner, 1))));
9768	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9769	      count = 0;
9770	      continue;
9771	    }
9772	  break;
9773
9774	default:
9775	  break;
9776	}
9777
9778      break;
9779    }
9780
9781  /* We need to determine what mode to do the shift in.  If the shift is
9782     a right shift or ROTATE, we must always do it in the mode it was
9783     originally done in.  Otherwise, we can do it in MODE, the widest mode
9784     encountered.  The code we care about is that of the shift that will
9785     actually be done, not the shift that was originally requested.  */
9786  shift_mode
9787    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9788       ? result_mode : mode);
9789
9790  /* We have now finished analyzing the shift.  The result should be
9791     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9792     OUTER_OP is non-NIL, it is an operation that needs to be applied
9793     to the result of the shift.  OUTER_CONST is the relevant constant,
9794     but we must turn off all bits turned off in the shift.
9795
9796     If we were passed a value for X, see if we can use any pieces of
9797     it.  If not, make new rtx.  */
9798
9799  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9800      && GET_CODE (XEXP (x, 1)) == CONST_INT
9801      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9802    const_rtx = XEXP (x, 1);
9803  else
9804    const_rtx = GEN_INT (count);
9805
9806  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9807      && GET_MODE (XEXP (x, 0)) == shift_mode
9808      && SUBREG_REG (XEXP (x, 0)) == varop)
9809    varop = XEXP (x, 0);
9810  else if (GET_MODE (varop) != shift_mode)
9811    varop = gen_lowpart_for_combine (shift_mode, varop);
9812
9813  /* If we can't make the SUBREG, try to return what we were given.  */
9814  if (GET_CODE (varop) == CLOBBER)
9815    return x ? x : varop;
9816
9817  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9818  if (new != 0)
9819    x = new;
9820  else
9821    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9822
9823  /* If we have an outer operation and we just made a shift, it is
9824     possible that we could have simplified the shift were it not
9825     for the outer operation.  So try to do the simplification
9826     recursively.  */
9827
9828  if (outer_op != NIL && GET_CODE (x) == code
9829      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9830    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9831			      INTVAL (XEXP (x, 1)));
9832
9833  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9834     turn off all the bits that the shift would have turned off.  */
9835  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9836    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9837				GET_MODE_MASK (result_mode) >> orig_count);
9838
9839  /* Do the remainder of the processing in RESULT_MODE.  */
9840  x = gen_lowpart_for_combine (result_mode, x);
9841
9842  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9843     operation.  */
9844  if (complement_p)
9845    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9846
9847  if (outer_op != NIL)
9848    {
9849      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9850	outer_const = trunc_int_for_mode (outer_const, result_mode);
9851
9852      if (outer_op == AND)
9853	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9854      else if (outer_op == SET)
9855	/* This means that we have determined that the result is
9856	   equivalent to a constant.  This should be rare.  */
9857	x = GEN_INT (outer_const);
9858      else if (GET_RTX_CLASS (outer_op) == '1')
9859	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9860      else
9861	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9862    }
9863
9864  return x;
9865}
9866
9867/* Like recog, but we receive the address of a pointer to a new pattern.
9868   We try to match the rtx that the pointer points to.
9869   If that fails, we may try to modify or replace the pattern,
9870   storing the replacement into the same pointer object.
9871
9872   Modifications include deletion or addition of CLOBBERs.
9873
9874   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9875   the CLOBBERs are placed.
9876
9877   The value is the final insn code from the pattern ultimately matched,
9878   or -1.  */
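
/* For instance, if the combined pattern needs a clobber of a flags
   register that the tentative pattern lacks, recog reports how many
   clobbers to add; we add them below, but only if each clobbered
   register is dead at INSN, and we record a REG_UNUSED note for each.  */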
9879
9880static int
9881recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
9882{
9883  rtx pat = *pnewpat;
9884  int insn_code_number;
9885  int num_clobbers_to_add = 0;
9886  int i;
9887  rtx notes = 0;
9888  rtx old_notes, old_pat;
9889
9890  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9891     we use to indicate that something didn't match.  If we find such a
9892     thing, force rejection.  */
9893  if (GET_CODE (pat) == PARALLEL)
9894    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9895      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9896	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9897	return -1;
9898
9899  old_pat = PATTERN (insn);
9900  old_notes = REG_NOTES (insn);
9901  PATTERN (insn) = pat;
9902  REG_NOTES (insn) = 0;
9903
9904  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9905
  /* If the pattern isn't recognized, we may previously have had an insn
9907     that clobbered some register as a side effect, but the combined
9908     insn doesn't need to do that.  So try once more without the clobbers
9909     unless this represents an ASM insn.  */
9910
9911  if (insn_code_number < 0 && ! check_asm_operands (pat)
9912      && GET_CODE (pat) == PARALLEL)
9913    {
9914      int pos;
9915
9916      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9917	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9918	  {
9919	    if (i != pos)
9920	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9921	    pos++;
9922	  }
9923
9924      SUBST_INT (XVECLEN (pat, 0), pos);
9925
9926      if (pos == 1)
9927	pat = XVECEXP (pat, 0, 0);
9928
9929      PATTERN (insn) = pat;
9930      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9931    }
9932  PATTERN (insn) = old_pat;
9933  REG_NOTES (insn) = old_notes;
9934
  /* Recognize all no-op sets; these will be removed by a follow-up pass.  */
9936  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9937    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9938
  /* If we had any clobbers to add, make a new pattern that contains
9940     them.  Then check to make sure that all of them are dead.  */
9941  if (num_clobbers_to_add)
9942    {
9943      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9944				     rtvec_alloc (GET_CODE (pat) == PARALLEL
9945						  ? (XVECLEN (pat, 0)
9946						     + num_clobbers_to_add)
9947						  : num_clobbers_to_add + 1));
9948
9949      if (GET_CODE (pat) == PARALLEL)
9950	for (i = 0; i < XVECLEN (pat, 0); i++)
9951	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9952      else
9953	XVECEXP (newpat, 0, 0) = pat;
9954
9955      add_clobbers (newpat, insn_code_number);
9956
9957      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9958	   i < XVECLEN (newpat, 0); i++)
9959	{
9960	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9961	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9962	    return -1;
9963	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9964				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9965	}
9966      pat = newpat;
9967    }
9968
9969  *pnewpat = pat;
9970  *pnotes = notes;
9971
9972  return insn_code_number;
9973}
9974
9975/* Like gen_lowpart but for use by combine.  In combine it is not possible
9976   to create any new pseudoregs.  However, it is safe to create
9977   invalid memory addresses, because combine will try to recognize
9978   them and all they will do is make the combine attempt fail.
9979
9980   If for some reason this cannot do its job, an rtx
9981   (clobber (const_int 0)) is returned.
9982   An insn containing that will not be recognized.  */
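
/* Illustrative example: asked for the QImode lowpart of (reg:SI 100),
   this returns (subreg:QI (reg:SI 100) 0) on a little-endian target;
   when no valid lowpart can be built, it returns (clobber (const_int 0))
   and the enclosing combination simply fails to match.  */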
9983
9984#undef gen_lowpart
9985
9986static rtx
9987gen_lowpart_for_combine (enum machine_mode mode, rtx x)
9988{
9989  rtx result;
9990
9991  if (GET_MODE (x) == mode)
9992    return x;
9993
9994  /* Return identity if this is a CONST or symbolic
9995     reference.  */
9996  if (mode == Pmode
9997      && (GET_CODE (x) == CONST
9998	  || GET_CODE (x) == SYMBOL_REF
9999	  || GET_CODE (x) == LABEL_REF))
10000    return x;
10001
10002  /* We can only support MODE being wider than a word if X is a
10003     constant integer or has a mode the same size.  */
10004
10005  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
10006      && ! ((GET_MODE (x) == VOIDmode
10007	     && (GET_CODE (x) == CONST_INT
10008		 || GET_CODE (x) == CONST_DOUBLE))
10009	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
10010    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10011
10012  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10013     won't know what to do.  So we will strip off the SUBREG here and
10014     process normally.  */
10015  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
10016    {
10017      x = SUBREG_REG (x);
10018      if (GET_MODE (x) == mode)
10019	return x;
10020    }
10021
10022  result = gen_lowpart_common (mode, x);
10023#ifdef CANNOT_CHANGE_MODE_CLASS
10024  if (result != 0 && GET_CODE (result) == SUBREG)
10025    record_subregs_of_mode (result);
10026#endif
10027
10028  if (result)
10029    return result;
10030
10031  if (GET_CODE (x) == MEM)
10032    {
10033      int offset = 0;
10034
10035      /* Refuse to work on a volatile memory ref or one with a mode-dependent
10036	 address.  */
10037      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10038	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10039
10040      /* If we want to refer to something bigger than the original memref,
10041	 generate a perverse subreg instead.  That will force a reload
10042	 of the original memref X.  */
10043      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
10044	return gen_rtx_SUBREG (mode, x, 0);
10045
10046      if (WORDS_BIG_ENDIAN)
10047	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
10048		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
10049
10050      if (BYTES_BIG_ENDIAN)
10051	{
10052	  /* Adjust the address so that the address-after-the-data is
10053	     unchanged.  */
10054	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
10055		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
10056	}
10057
10058      return adjust_address_nv (x, mode, offset);
10059    }
10060
10061  /* If X is a comparison operator, rewrite it in a new mode.  This
10062     probably won't match, but may allow further simplifications.  */
10063  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
10064    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
10065
10066  /* If we couldn't simplify X any other way, just enclose it in a
10067     SUBREG.  Normally, this SUBREG won't match, but some patterns may
10068     include an explicit SUBREG or we may simplify it further in combine.  */
10069  else
10070    {
10071      int offset = 0;
10072      rtx res;
10073      enum machine_mode sub_mode = GET_MODE (x);
10074
10075      offset = subreg_lowpart_offset (mode, sub_mode);
10076      if (sub_mode == VOIDmode)
10077	{
10078	  sub_mode = int_mode_for_mode (mode);
10079	  x = gen_lowpart_common (sub_mode, x);
10080	  if (x == 0)
10081	    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
10082	}
10083      res = simplify_gen_subreg (mode, x, sub_mode, offset);
10084      if (res)
10085	return res;
10086      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10087    }
10088}
10089
10090/* These routines make binary and unary operations by first seeing if they
10091   fold; if not, a new expression is allocated.  */
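
/* Illustrative example: gen_binary (AND, SImode, X, const0_rtx) folds to
   const0_rtx, while gen_binary (PLUS, SImode, GEN_INT (4), X) is
   canonicalized to (plus:SI X (const_int 4)) with the constant second.  */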
10092
10093static rtx
10094gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0, rtx op1)
10095{
10096  rtx result;
10097  rtx tem;
10098
10099  if (GET_CODE (op0) == CLOBBER)
10100    return op0;
10101  else if (GET_CODE (op1) == CLOBBER)
10102    return op1;
10103
10104  if (GET_RTX_CLASS (code) == 'c'
10105      && swap_commutative_operands_p (op0, op1))
10106    tem = op0, op0 = op1, op1 = tem;
10107
10108  if (GET_RTX_CLASS (code) == '<')
10109    {
10110      enum machine_mode op_mode = GET_MODE (op0);
10111
10112      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
10113	 just (REL_OP X Y).  */
10114      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
10115	{
10116	  op1 = XEXP (op0, 1);
10117	  op0 = XEXP (op0, 0);
10118	  op_mode = GET_MODE (op0);
10119	}
10120
10121      if (op_mode == VOIDmode)
10122	op_mode = GET_MODE (op1);
10123      result = simplify_relational_operation (code, op_mode, op0, op1);
10124    }
10125  else
10126    result = simplify_binary_operation (code, mode, op0, op1);
10127
10128  if (result)
10129    return result;
10130
10131  /* Put complex operands first and constants second.  */
10132  if (GET_RTX_CLASS (code) == 'c'
10133      && swap_commutative_operands_p (op0, op1))
10134    return gen_rtx_fmt_ee (code, mode, op1, op0);
10135
10136  /* If we are turning off bits already known off in OP0, we need not do
10137     an AND.  */
10138  else if (code == AND && GET_CODE (op1) == CONST_INT
10139	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10140	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
10141    return op0;
10142
10143  return gen_rtx_fmt_ee (code, mode, op0, op1);
10144}
10145
10146/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10147   comparison code that will be tested.
10148
10149   The result is a possibly different comparison code to use.  *POP0 and
10150   *POP1 may be updated.
10151
10152   It is possible that we might detect that a comparison is either always
10153   true or always false.  However, we do not perform general constant
10154   folding in combine, so this knowledge isn't useful.  Such tautologies
10155   should have been detected earlier.  Hence we ignore all such cases.  */
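
/* Illustrative example: (ltu X (const_int 4)) becomes
   (leu X (const_int 3)), and in SImode (ltu X (const_int 0x80000000))
   becomes (ge X (const_int 0)), since an unsigned comparison against
   the sign-bit value is really a sign test.  */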
10156
10157static enum rtx_code
10158simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10159{
10160  rtx op0 = *pop0;
10161  rtx op1 = *pop1;
10162  rtx tem, tem1;
10163  int i;
10164  enum machine_mode mode, tmode;
10165
10166  /* Try a few ways of applying the same transformation to both operands.  */
10167  while (1)
10168    {
10169#ifndef WORD_REGISTER_OPERATIONS
10170      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10171	 so check specially.  */
10172      if (code != GTU && code != GEU && code != LTU && code != LEU
10173	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10174	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
10175	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
10176	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10177	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10178	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10179	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10180	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10181	  && XEXP (op0, 1) == XEXP (op1, 1)
10182	  && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10183	  && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10184	  && (INTVAL (XEXP (op0, 1))
10185	      == (GET_MODE_BITSIZE (GET_MODE (op0))
10186		  - (GET_MODE_BITSIZE
10187		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10188	{
10189	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10190	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10191	}
10192#endif
10193
10194      /* If both operands are the same constant shift, see if we can ignore the
10195	 shift.  We can if the shift is a rotate or if the bits shifted out of
10196	 this shift are known to be zero for both inputs and if the type of
10197	 comparison is compatible with the shift.  */
10198      if (GET_CODE (op0) == GET_CODE (op1)
10199	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10200	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10201	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10202		  && (code != GT && code != LT && code != GE && code != LE))
10203	      || (GET_CODE (op0) == ASHIFTRT
10204		  && (code != GTU && code != LTU
10205		      && code != GEU && code != LEU)))
10206	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10207	  && INTVAL (XEXP (op0, 1)) >= 0
10208	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10209	  && XEXP (op0, 1) == XEXP (op1, 1))
10210	{
10211	  enum machine_mode mode = GET_MODE (op0);
10212	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10213	  int shift_count = INTVAL (XEXP (op0, 1));
10214
10215	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10216	    mask &= (mask >> shift_count) << shift_count;
10217	  else if (GET_CODE (op0) == ASHIFT)
10218	    mask = (mask & (mask << shift_count)) >> shift_count;
10219
10220	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10221	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10222	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10223	  else
10224	    break;
10225	}
10226
10227      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10228	 SUBREGs are of the same mode, and, in both cases, the AND would
10229	 be redundant if the comparison was done in the narrower mode,
10230	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10231	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10232	 if we only care about QImode, we don't need the AND).  This case
10233	 occurs if the output mode of an scc insn is not SImode and
10234	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10235
10236	 Similarly, check for a case where the AND's are ZERO_EXTEND
10237	 operations from some narrower mode even though a SUBREG is not
10238	 present.  */
10239
10240      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10241	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10242	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10243	{
10244	  rtx inner_op0 = XEXP (op0, 0);
10245	  rtx inner_op1 = XEXP (op1, 0);
10246	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10247	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10248	  int changed = 0;
10249
10250	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10251	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10252		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10253	      && (GET_MODE (SUBREG_REG (inner_op0))
10254		  == GET_MODE (SUBREG_REG (inner_op1)))
10255	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10256		  <= HOST_BITS_PER_WIDE_INT)
10257	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10258					     GET_MODE (SUBREG_REG (inner_op0)))))
10259	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10260					     GET_MODE (SUBREG_REG (inner_op1))))))
10261	    {
10262	      op0 = SUBREG_REG (inner_op0);
10263	      op1 = SUBREG_REG (inner_op1);
10264
10265	      /* The resulting comparison is always unsigned since we masked
10266		 off the original sign bit.  */
10267	      code = unsigned_condition (code);
10268
10269	      changed = 1;
10270	    }
10271
10272	  else if (c0 == c1)
10273	    for (tmode = GET_CLASS_NARROWEST_MODE
10274		 (GET_MODE_CLASS (GET_MODE (op0)));
10275		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10276	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10277		{
10278		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10279		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10280		  code = unsigned_condition (code);
10281		  changed = 1;
10282		  break;
10283		}
10284
10285	  if (! changed)
10286	    break;
10287	}
10288
10289      /* If both operands are NOT, we can strip off the outer operation
10290	 and adjust the comparison code for swapped operands; similarly for
10291	 NEG, except that this must be an equality comparison.  */
10292      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10293	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10294		   && (code == EQ || code == NE)))
10295	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10296
10297      else
10298	break;
10299    }
10300
10301  /* If the first operand is a constant, swap the operands and adjust the
10302     comparison code appropriately, but don't do this if the second operand
10303     is already a constant integer.  */
10304  if (swap_commutative_operands_p (op0, op1))
10305    {
10306      tem = op0, op0 = op1, op1 = tem;
10307      code = swap_condition (code);
10308    }
10309
10310  /* We now enter a loop during which we will try to simplify the comparison.
10311     For the most part, we only are concerned with comparisons with zero,
10312     but some things may really be comparisons with zero but not start
10313     out looking that way.  */
10314
10315  while (GET_CODE (op1) == CONST_INT)
10316    {
10317      enum machine_mode mode = GET_MODE (op0);
10318      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10319      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10320      int equality_comparison_p;
10321      int sign_bit_comparison_p;
10322      int unsigned_comparison_p;
10323      HOST_WIDE_INT const_op;
10324
10325      /* We only want to handle integral modes.  This catches VOIDmode,
10326	 CCmode, and the floating-point modes.  An exception is that we
10327	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10328	 operation.  */
10329
10330      if (GET_MODE_CLASS (mode) != MODE_INT
10331	  && ! (mode == VOIDmode
10332		&& (GET_CODE (op0) == COMPARE
10333		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10334	break;
10335
10336      /* Get the constant we are comparing against and turn off all bits
10337	 not on in our mode.  */
10338      const_op = INTVAL (op1);
10339      if (mode != VOIDmode)
10340	const_op = trunc_int_for_mode (const_op, mode);
10341      op1 = GEN_INT (const_op);
10342
10343      /* If we are comparing against a constant power of two and the value
10344	 being compared can only have that single bit nonzero (e.g., it was
10345	 `and'ed with that bit), we can replace this with a comparison
10346	 with zero.  */
10347      if (const_op
10348	  && (code == EQ || code == NE || code == GE || code == GEU
10349	      || code == LT || code == LTU)
10350	  && mode_width <= HOST_BITS_PER_WIDE_INT
10351	  && exact_log2 (const_op) >= 0
10352	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10353	{
10354	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10355	  op1 = const0_rtx, const_op = 0;
10356	}
10357
10358      /* Similarly, if we are comparing a value known to be either -1 or
10359	 0 with -1, change it to the opposite comparison against zero.  */
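      /* For example, if every bit of X is known to be a copy of its sign
	 bit, so that X is either 0 or -1, then (ne X (const_int -1))
	 becomes (eq X (const_int 0)).  */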
10360
10361      if (const_op == -1
10362	  && (code == EQ || code == NE || code == GT || code == LE
10363	      || code == GEU || code == LTU)
10364	  && num_sign_bit_copies (op0, mode) == mode_width)
10365	{
10366	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10367	  op1 = const0_rtx, const_op = 0;
10368	}
10369
10370      /* Do some canonicalizations based on the comparison code.  We prefer
10371	 comparisons against zero and then prefer equality comparisons.
10372	 If we can reduce the size of a constant, we will do that too.  */
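      /* For example, (lt X (const_int 1)) becomes (le X (const_int 0))
	 below, and that in turn becomes (eq X (const_int 0)) if the sign
	 bit of X is known to be zero.  */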
10373
10374      switch (code)
10375	{
10376	case LT:
10377	  /* < C is equivalent to <= (C - 1) */
10378	  if (const_op > 0)
10379	    {
10380	      const_op -= 1;
10381	      op1 = GEN_INT (const_op);
10382	      code = LE;
10383	      /* ... fall through to LE case below.  */
10384	    }
10385	  else
10386	    break;
10387
10388	case LE:
10389	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
10390	  if (const_op < 0)
10391	    {
10392	      const_op += 1;
10393	      op1 = GEN_INT (const_op);
10394	      code = LT;
10395	    }
10396
10397	  /* If we are doing a <= 0 comparison on a value known to have
10398	     a zero sign bit, we can replace this with == 0.  */
10399	  else if (const_op == 0
10400		   && mode_width <= HOST_BITS_PER_WIDE_INT
10401		   && (nonzero_bits (op0, mode)
10402		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10403	    code = EQ;
10404	  break;
10405
10406	case GE:
10407	  /* >= C is equivalent to > (C - 1).  */
10408	  if (const_op > 0)
10409	    {
10410	      const_op -= 1;
10411	      op1 = GEN_INT (const_op);
10412	      code = GT;
10413	      /* ... fall through to GT below.  */
10414	    }
10415	  else
10416	    break;
10417
10418	case GT:
10419	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10420	  if (const_op < 0)
10421	    {
10422	      const_op += 1;
10423	      op1 = GEN_INT (const_op);
10424	      code = GE;
10425	    }
10426
10427	  /* If we are doing a > 0 comparison on a value known to have
10428	     a zero sign bit, we can replace this with != 0.  */
10429	  else if (const_op == 0
10430		   && mode_width <= HOST_BITS_PER_WIDE_INT
10431		   && (nonzero_bits (op0, mode)
10432		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10433	    code = NE;
10434	  break;
10435
10436	case LTU:
10437	  /* < C is equivalent to <= (C - 1).  */
10438	  if (const_op > 0)
10439	    {
10440	      const_op -= 1;
10441	      op1 = GEN_INT (const_op);
10442	      code = LEU;
10443	      /* ... fall through ...  */
10444	    }
10445
10446	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10447	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10448		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10449	    {
10450	      const_op = 0, op1 = const0_rtx;
10451	      code = GE;
10452	      break;
10453	    }
10454	  else
10455	    break;
10456
10457	case LEU:
10458	  /* unsigned <= 0 is equivalent to == 0 */
10459	  if (const_op == 0)
10460	    code = EQ;
10461
10462	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10463	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10464		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10465	    {
10466	      const_op = 0, op1 = const0_rtx;
10467	      code = GE;
10468	    }
10469	  break;
10470
10471	case GEU:
10472	  /* >= C is equivalent to > (C - 1).  */
10473	  if (const_op > 1)
10474	    {
10475	      const_op -= 1;
10476	      op1 = GEN_INT (const_op);
10477	      code = GTU;
10478	      /* ... fall through ...  */
10479	    }
10480
10481	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10482	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10483		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10484	    {
10485	      const_op = 0, op1 = const0_rtx;
10486	      code = LT;
10487	      break;
10488	    }
10489	  else
10490	    break;
10491
10492	case GTU:
10493	  /* unsigned > 0 is equivalent to != 0 */
10494	  if (const_op == 0)
10495	    code = NE;
10496
10497	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10498	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10499		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10500	    {
10501	      const_op = 0, op1 = const0_rtx;
10502	      code = LT;
10503	    }
10504	  break;
10505
10506	default:
10507	  break;
10508	}
10509
10510      /* Compute some predicates to simplify code below.  */
10511
10512      equality_comparison_p = (code == EQ || code == NE);
10513      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10514      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10515			       || code == GEU);
10516
10517      /* If this is a sign bit comparison and we can do arithmetic in
10518	 MODE, say that we will only be needing the sign bit of OP0.  */
10519      if (sign_bit_comparison_p
10520	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10521	op0 = force_to_mode (op0, mode,
10522			     ((HOST_WIDE_INT) 1
10523			      << (GET_MODE_BITSIZE (mode) - 1)),
10524			     NULL_RTX, 0);
10525
10526      /* Now try cases based on the opcode of OP0.  If none of the cases
10527	 does a "continue", we exit this loop immediately after the
10528	 switch.  */
10529
10530      switch (GET_CODE (op0))
10531	{
10532	case ZERO_EXTRACT:
10533	  /* If we are extracting a single bit from a variable position in
10534	     a constant that has only a single bit set and are comparing it
10535	     with zero, we can convert this into an equality comparison
10536	     between the position and the location of the single bit.  */
10537	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10538	     have already reduced the shift count modulo the word size.  */
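	  /* For example, with little-endian bit numbering,
	     (eq (zero_extract (const_int 4) (const_int 1) POS)
		 (const_int 0))
	     becomes (ne POS (const_int 2)), since the extracted bit is
	     zero exactly when POS does not name bit 2.  */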
10539	  if (!SHIFT_COUNT_TRUNCATED
10540	      && GET_CODE (XEXP (op0, 0)) == CONST_INT
10541	      && XEXP (op0, 1) == const1_rtx
10542	      && equality_comparison_p && const_op == 0
10543	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10544	    {
10545	      if (BITS_BIG_ENDIAN)
10546		{
10547		  enum machine_mode new_mode
10548		    = mode_for_extraction (EP_extzv, 1);
10549		  if (new_mode == MAX_MACHINE_MODE)
10550		    i = BITS_PER_WORD - 1 - i;
10551		  else
10552		    {
10553		      mode = new_mode;
10554		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10555		    }
10556		}
10557
10558	      op0 = XEXP (op0, 2);
10559	      op1 = GEN_INT (i);
10560	      const_op = i;
10561
10562	      /* Result is nonzero iff shift count is equal to I.  */
10563	      code = reverse_condition (code);
10564	      continue;
10565	    }
10566
10567	  /* ... fall through ...  */
10568
10569	case SIGN_EXTRACT:
10570	  tem = expand_compound_operation (op0);
10571	  if (tem != op0)
10572	    {
10573	      op0 = tem;
10574	      continue;
10575	    }
10576	  break;
10577
10578	case NOT:
10579	  /* If testing for equality, we can take the NOT of the constant.  */
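	  /* For example, (eq (not X) (const_int 12)) becomes
	     (eq X (const_int -13)).  */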
10580	  if (equality_comparison_p
10581	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10582	    {
10583	      op0 = XEXP (op0, 0);
10584	      op1 = tem;
10585	      continue;
10586	    }
10587
10588	  /* If just looking at the sign bit, reverse the sense of the
10589	     comparison.  */
10590	  if (sign_bit_comparison_p)
10591	    {
10592	      op0 = XEXP (op0, 0);
10593	      code = (code == GE ? LT : GE);
10594	      continue;
10595	    }
10596	  break;
10597
10598	case NEG:
10599	  /* If testing for equality, we can take the NEG of the constant.  */
10600	  if (equality_comparison_p
10601	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10602	    {
10603	      op0 = XEXP (op0, 0);
10604	      op1 = tem;
10605	      continue;
10606	    }
10607
10608	  /* The remaining cases only apply to comparisons with zero.  */
10609	  if (const_op != 0)
10610	    break;
10611
10612	  /* When X is ABS or is known positive,
10613	     (neg X) is < 0 if and only if X != 0.  */
10614
10615	  if (sign_bit_comparison_p
10616	      && (GET_CODE (XEXP (op0, 0)) == ABS
10617		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10618		      && (nonzero_bits (XEXP (op0, 0), mode)
10619			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10620	    {
10621	      op0 = XEXP (op0, 0);
10622	      code = (code == LT ? NE : EQ);
10623	      continue;
10624	    }
10625
10626	  /* If we have NEG of something whose two high-order bits are the
10627	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10628	  if (num_sign_bit_copies (op0, mode) >= 2)
10629	    {
10630	      op0 = XEXP (op0, 0);
10631	      code = swap_condition (code);
10632	      continue;
10633	    }
10634	  break;
10635
10636	case ROTATE:
10637	  /* If we are testing equality and our count is a constant, we
10638	     can perform the inverse operation on our RHS.  */
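	  /* For example, (eq (rotate X (const_int 4)) (const_int 48))
	     becomes (eq X (const_int 3)), since rotating 48 right by 4
	     bits recovers the original value 3.  */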
10639	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10640	      && (tem = simplify_binary_operation (ROTATERT, mode,
10641						   op1, XEXP (op0, 1))) != 0)
10642	    {
10643	      op0 = XEXP (op0, 0);
10644	      op1 = tem;
10645	      continue;
10646	    }
10647
10648	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10649	     a particular bit.  Convert it to an AND of a constant of that
10650	     bit.  This will be converted into a ZERO_EXTRACT.  */
10651	  if (const_op == 0 && sign_bit_comparison_p
10652	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10653	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10654	    {
10655	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10656					    ((HOST_WIDE_INT) 1
10657					     << (mode_width - 1
10658						 - INTVAL (XEXP (op0, 1)))));
10659	      code = (code == LT ? NE : EQ);
10660	      continue;
10661	    }
10662
10663	  /* Fall through.  */
10664
10665	case ABS:
10666	  /* ABS is ignorable inside an equality comparison with zero.  */
10667	  if (const_op == 0 && equality_comparison_p)
10668	    {
10669	      op0 = XEXP (op0, 0);
10670	      continue;
10671	    }
10672	  break;
10673
10674	case SIGN_EXTEND:
10675	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10676	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10677	     are either testing inequality or have an unsigned comparison
10678	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
10679	  if (! unsigned_comparison_p
10680	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10681		  <= HOST_BITS_PER_WIDE_INT)
10682	      && ((unsigned HOST_WIDE_INT) const_op
10683		  < (((unsigned HOST_WIDE_INT) 1
10684		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10685	    {
10686	      op0 = XEXP (op0, 0);
10687	      continue;
10688	    }
10689	  break;
10690
10691	case SUBREG:
10692	  /* Check for the case where we are comparing A - C1 with C2, that is
10693
10694	       (subreg:MODE (plus (A) (-C1))) op (C2)
10695
10696	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
10697	     comparison in the wider mode.  One of the following two conditions
10698	     must be true in order for this to be valid:
10699
10700	       1. The mode extension results in the same bit pattern being added
10701		  on both sides and the comparison is equality or unsigned.  As
10702		  C2 has been truncated to fit in MODE, the pattern can only be
10703		  all 0s or all 1s.
10704
10705	       2. The mode extension results in the sign bit being copied on
10706		  each side.
10707
10708	     The difficulty here is that we have predicates for A but not for
10709	     (A - C1) so we need to check that C1 is within proper bounds so
10710	     as to perturb A as little as possible.  */
10711
10712	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10713	      && subreg_lowpart_p (op0)
10714	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10715	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10716	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
10717	    {
10718	      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10719	      rtx a = XEXP (SUBREG_REG (op0), 0);
10720	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10721
10722	      if ((c1 > 0
10723	           && (unsigned HOST_WIDE_INT) c1
10724		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10725		   && (equality_comparison_p || unsigned_comparison_p)
10726		   /* (A - C1) zero-extends if it is positive and sign-extends
10727		      if it is negative, C2 both zero- and sign-extends.  */
10728		   && ((0 == (nonzero_bits (a, inner_mode)
10729			      & ~GET_MODE_MASK (mode))
10730			&& const_op >= 0)
10731		       /* (A - C1) sign-extends if it is positive and 1-extends
10732			  if it is negative, C2 both sign- and 1-extends.  */
10733		       || (num_sign_bit_copies (a, inner_mode)
10734			   > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10735					     - mode_width)
10736			   && const_op < 0)))
10737		  || ((unsigned HOST_WIDE_INT) c1
10738		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10739		      /* (A - C1) always sign-extends, like C2.  */
10740		      && num_sign_bit_copies (a, inner_mode)
10741			 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10742					   - mode_width - 1)))
10743		{
10744		  op0 = SUBREG_REG (op0);
10745		  continue;
10746	        }
10747	    }
10748
10749	  /* If the inner mode is narrower and we are extracting the low part,
10750	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10751	  if (subreg_lowpart_p (op0)
10752	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10753	    /* Fall through */ ;
10754	  else
10755	    break;
10756
10757	  /* ... fall through ...  */
10758
10759	case ZERO_EXTEND:
10760	  if ((unsigned_comparison_p || equality_comparison_p)
10761	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10762		  <= HOST_BITS_PER_WIDE_INT)
10763	      && ((unsigned HOST_WIDE_INT) const_op
10764		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10765	    {
10766	      op0 = XEXP (op0, 0);
10767	      continue;
10768	    }
10769	  break;
10770
10771	case PLUS:
10772	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10773	     this for equality comparisons due to pathological cases involving
10774	     overflows.  */
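	  /* For example, (eq (plus X (const_int 3)) (const_int 10))
	     becomes (eq X (const_int 7)).  */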
10775	  if (equality_comparison_p
10776	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10777							op1, XEXP (op0, 1))))
10778	    {
10779	      op0 = XEXP (op0, 0);
10780	      op1 = tem;
10781	      continue;
10782	    }
10783
10784	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10785	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10786	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10787	    {
10788	      op0 = XEXP (XEXP (op0, 0), 0);
10789	      code = (code == LT ? EQ : NE);
10790	      continue;
10791	    }
10792	  break;
10793
10794	case MINUS:
10795	  /* We used to optimize signed comparisons against zero, but that
10796	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10797	     arrive here as equality comparisons, or (GEU, LTU) are
10798	     optimized away.  No need to special-case them.  */
10799
10800	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10801	     (eq B (minus A C)), whichever simplifies.  We can only do
10802	     this for equality comparisons due to pathological cases involving
10803	     overflows.  */
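	  /* For example, (eq (minus X (const_int 4)) (const_int 6))
	     becomes (eq X (const_int 10)).  */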
10804	  if (equality_comparison_p
10805	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10806							XEXP (op0, 1), op1)))
10807	    {
10808	      op0 = XEXP (op0, 0);
10809	      op1 = tem;
10810	      continue;
10811	    }
10812
10813	  if (equality_comparison_p
10814	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10815							XEXP (op0, 0), op1)))
10816	    {
10817	      op0 = XEXP (op0, 1);
10818	      op1 = tem;
10819	      continue;
10820	    }
10821
10822	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10823	     of bits in X minus 1, is one iff X > 0.  */
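	  /* To see this, note that (ashiftrt X C) is 0 when X >= 0 and -1
	     when X < 0, so the difference is -X in the first case and
	     -1 - X in the second; its sign bit is set exactly when X > 0.  */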
10824	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10825	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10826	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10827		 == mode_width - 1
10828	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10829	    {
10830	      op0 = XEXP (op0, 1);
10831	      code = (code == GE ? LE : GT);
10832	      continue;
10833	    }
10834	  break;
10835
10836	case XOR:
10837	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10838	     if C is zero or B is a constant.  */
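	  /* For example, (eq (xor X (const_int 5)) (const_int 3))
	     becomes (eq X (const_int 6)).  */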
10839	  if (equality_comparison_p
10840	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10841							XEXP (op0, 1), op1)))
10842	    {
10843	      op0 = XEXP (op0, 0);
10844	      op1 = tem;
10845	      continue;
10846	    }
10847	  break;
10848
10849	case EQ:  case NE:
10850	case UNEQ:  case LTGT:
10851	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10852	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10853        case UNORDERED: case ORDERED:
10854	  /* We can't do anything if OP0 is a condition code value, rather
10855	     than an actual data value.  */
10856	  if (const_op != 0
10857	      || CC0_P (XEXP (op0, 0))
10858	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10859	    break;
10860
10861	  /* Get the two operands being compared.  */
10862	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10863	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10864	  else
10865	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10866
10867	  /* Check for the cases where we simply want the result of the
10868	     earlier test or the opposite of that result.  */
10869	  if (code == NE || code == EQ
10870	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10871		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10872		  && (STORE_FLAG_VALUE
10873		      & (((HOST_WIDE_INT) 1
10874			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10875		  && (code == LT || code == GE)))
10876	    {
10877	      enum rtx_code new_code;
10878	      if (code == LT || code == NE)
10879		new_code = GET_CODE (op0);
10880	      else
10881		new_code = combine_reversed_comparison_code (op0);
10882
10883	      if (new_code != UNKNOWN)
10884		{
10885		  code = new_code;
10886		  op0 = tem;
10887		  op1 = tem1;
10888		  continue;
10889		}
10890	    }
10891	  break;
10892
10893	case IOR:
10894	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10895	     iff X <= 0.  */
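	  /* To see this, note that when X > 0 both X and X - 1 are
	     nonnegative, so the IOR has a clear sign bit; when X < 0,
	     X itself has the sign bit set; and when X == 0, X - 1
	     wraps to -1, which also has the sign bit set.  */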
10896	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10897	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10898	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10899	    {
10900	      op0 = XEXP (op0, 1);
10901	      code = (code == GE ? GT : LE);
10902	      continue;
10903	    }
10904	  break;
10905
10906	case AND:
10907	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10908	     will be converted to a ZERO_EXTRACT later.  */
10909	  if (const_op == 0 && equality_comparison_p
10910	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10911	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10912	    {
10913	      op0 = simplify_and_const_int
10914		(op0, mode, gen_rtx_LSHIFTRT (mode,
10915					      XEXP (op0, 1),
10916					      XEXP (XEXP (op0, 0), 1)),
10917		 (HOST_WIDE_INT) 1);
10918	      continue;
10919	    }
10920
10921	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10922	     zero and X is a comparison and C1 and C2 describe only bits set
10923	     in STORE_FLAG_VALUE, we can compare with X.  */
10924	  if (const_op == 0 && equality_comparison_p
10925	      && mode_width <= HOST_BITS_PER_WIDE_INT
10926	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10927	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10928	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10929	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10930	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10931	    {
10932	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10933		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10934	      if ((~STORE_FLAG_VALUE & mask) == 0
10935		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10936		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10937			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10938		{
10939		  op0 = XEXP (XEXP (op0, 0), 0);
10940		  continue;
10941		}
10942	    }
10943
10944	  /* If we are doing an equality comparison of an AND of a bit equal
10945	     to the sign bit, replace this with a LT or GE comparison of
10946	     the underlying value.  */
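	  /* For example, in SImode, (eq (and X (const_int 0x80000000))
	     (const_int 0)) becomes (ge X (const_int 0)).  */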
10947	  if (equality_comparison_p
10948	      && const_op == 0
10949	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10950	      && mode_width <= HOST_BITS_PER_WIDE_INT
10951	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10952		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10953	    {
10954	      op0 = XEXP (op0, 0);
10955	      code = (code == EQ ? GE : LT);
10956	      continue;
10957	    }
10958
10959	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10960	     mode, the constant fits within that mode, and this is either an
10961	     equality or unsigned comparison, try to do this comparison in
10962	     the narrower mode.  */
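	  /* For example, (leu (and:SI X (const_int 255)) (const_int 50))
	     can be done as a QImode comparison of the low part of X
	     with 50, since the AND keeps only QImode bits.  */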
10963	  if ((equality_comparison_p || unsigned_comparison_p)
10964	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10965	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10966				   & GET_MODE_MASK (mode))
10967				  + 1)) >= 0
10968	      && const_op >> i == 0
10969	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10970	    {
10971	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10972	      continue;
10973	    }
10974
10975	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10976	     fits in both M1 and M2 and the SUBREG is either paradoxical
10977	     or represents the low part, permute the SUBREG and the AND
10978	     and try again.  */
10979	  if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10980	    {
10981	      unsigned HOST_WIDE_INT c1;
10982	      tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10983	      /* Require an integral mode, to avoid creating something like
10984		 (AND:SF ...).  */
10985	      if (SCALAR_INT_MODE_P (tmode)
10986		  /* It is unsafe to commute the AND into the SUBREG if the
10987		     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10988		     not defined.  As originally written the upper bits
10989		     have a defined value due to the AND operation.
10990		     However, if we commute the AND inside the SUBREG then
10991		     they no longer have defined values and the meaning of
10992		     the code has been changed.  */
10993		  && (0
10994#ifdef WORD_REGISTER_OPERATIONS
10995		      || (mode_width > GET_MODE_BITSIZE (tmode)
10996			  && mode_width <= BITS_PER_WORD)
10997#endif
10998		      || (mode_width <= GET_MODE_BITSIZE (tmode)
10999			  && subreg_lowpart_p (XEXP (op0, 0))))
11000		  && GET_CODE (XEXP (op0, 1)) == CONST_INT
11001		  && mode_width <= HOST_BITS_PER_WIDE_INT
11002		  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
11003		  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
11004		  && (c1 & ~GET_MODE_MASK (tmode)) == 0
11005		  && c1 != mask
11006		  && c1 != GET_MODE_MASK (tmode))
11007		{
11008		  op0 = gen_binary (AND, tmode,
11009				    SUBREG_REG (XEXP (op0, 0)),
11010				    gen_int_mode (c1, tmode));
11011		  op0 = gen_lowpart_for_combine (mode, op0);
11012		  continue;
11013		}
11014	    }
11015
11016	  /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
11017	  if (const_op == 0 && equality_comparison_p
11018	      && XEXP (op0, 1) == const1_rtx
11019	      && GET_CODE (XEXP (op0, 0)) == NOT)
11020	    {
11021	      op0 = simplify_and_const_int
11022		(NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
11023	      code = (code == NE ? EQ : NE);
11024	      continue;
11025	    }
11026
11027	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
11028	     (eq (and (lshiftrt X) 1) 0).
11029	     Also handle the case where (not X) is expressed using xor.  */
11030	  if (const_op == 0 && equality_comparison_p
11031	      && XEXP (op0, 1) == const1_rtx
11032	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11033	    {
11034	      rtx shift_op = XEXP (XEXP (op0, 0), 0);
11035	      rtx shift_count = XEXP (XEXP (op0, 0), 1);
11036
11037	      if (GET_CODE (shift_op) == NOT
11038		  || (GET_CODE (shift_op) == XOR
11039		      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
11040		      && GET_CODE (shift_count) == CONST_INT
11041		      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11042		      && (INTVAL (XEXP (shift_op, 1))
11043			  == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11044		{
11045		  op0 = simplify_and_const_int
11046		    (NULL_RTX, mode,
11047		     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11048		     (HOST_WIDE_INT) 1);
11049		  code = (code == NE ? EQ : NE);
11050		  continue;
11051		}
11052	    }
11053	  break;
11054
11055	case ASHIFT:
11056	  /* If we have (compare (ashift FOO N) (const_int C)) and
11057	     the high order N bits of FOO (N+1 if an inequality comparison)
11058	     are known to be zero, we can do this by comparing FOO with C
11059	     shifted right N bits so long as the low-order N bits of C are
11060	     zero.  */
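	  /* For example, if the two high-order bits of X are known to be
	     zero, (eq (ashift X (const_int 2)) (const_int 20)) becomes
	     (eq X (const_int 5)), since 20 has its low two bits clear.  */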
11061	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11062	      && INTVAL (XEXP (op0, 1)) >= 0
11063	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11064		  < HOST_BITS_PER_WIDE_INT)
11065	      && ((const_op
11066		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11067	      && mode_width <= HOST_BITS_PER_WIDE_INT
11068	      && (nonzero_bits (XEXP (op0, 0), mode)
11069		  & ~(mask >> (INTVAL (XEXP (op0, 1))
11070			       + ! equality_comparison_p))) == 0)
11071	    {
11072	      /* We must perform a logical shift, not an arithmetic one,
11073		 as we want the top N bits of C to be zero.  */
11074	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11075
11076	      temp >>= INTVAL (XEXP (op0, 1));
11077	      op1 = gen_int_mode (temp, mode);
11078	      op0 = XEXP (op0, 0);
11079	      continue;
11080	    }
11081
11082	  /* If we are doing a sign bit comparison, it means we are testing
11083	     a particular bit.  Convert it to the appropriate AND.  */
11084	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
11085	      && mode_width <= HOST_BITS_PER_WIDE_INT)
11086	    {
11087	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11088					    ((HOST_WIDE_INT) 1
11089					     << (mode_width - 1
11090						 - INTVAL (XEXP (op0, 1)))));
11091	      code = (code == LT ? NE : EQ);
11092	      continue;
11093	    }
11094
11095	  /* If this is an equality comparison with zero and we are shifting
11096	     the low bit to the sign bit, we can convert this to an AND of the
11097	     low-order bit.  */
11098	  if (const_op == 0 && equality_comparison_p
11099	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11100	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11101		 == mode_width - 1)
11102	    {
11103	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11104					    (HOST_WIDE_INT) 1);
11105	      continue;
11106	    }
11107	  break;
11108
11109	case ASHIFTRT:
11110	  /* If this is an equality comparison with zero, we can do this
11111	     as a logical shift, which might be much simpler.  */
11112	  if (equality_comparison_p && const_op == 0
11113	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
11114	    {
11115	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11116					  XEXP (op0, 0),
11117					  INTVAL (XEXP (op0, 1)));
11118	      continue;
11119	    }
11120
11121	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11122	     do the comparison in a narrower mode.  */
11123	  if (! unsigned_comparison_p
11124	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11125	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11126	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11127	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11128					 MODE_INT, 1)) != BLKmode
11129	      && (((unsigned HOST_WIDE_INT) const_op
11130		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11131		  <= GET_MODE_MASK (tmode)))
11132	    {
11133	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
11134	      continue;
11135	    }
11136
11137	  /* Likewise if OP0 is a PLUS of a sign extension with a
11138	     constant, which is usually represented with the PLUS
11139	     between the shifts.  */
11140	  if (! unsigned_comparison_p
11141	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11142	      && GET_CODE (XEXP (op0, 0)) == PLUS
11143	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11144	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11145	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11146	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11147					 MODE_INT, 1)) != BLKmode
11148	      && (((unsigned HOST_WIDE_INT) const_op
11149		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11150		  <= GET_MODE_MASK (tmode)))
11151	    {
11152	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11153	      rtx add_const = XEXP (XEXP (op0, 0), 1);
11154	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
11155					  XEXP (op0, 1));
11156
11157	      op0 = gen_binary (PLUS, tmode,
11158				gen_lowpart_for_combine (tmode, inner),
11159				new_const);
11160	      continue;
11161	    }
11162
11163	  /* ... fall through ...  */
11164	case LSHIFTRT:
11165	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11166	     the low order N bits of FOO are known to be zero, we can do this
11167	     by comparing FOO with C shifted left N bits so long as no
11168	     overflow occurs.  */
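	  /* For example, if the two low-order bits of X are known to be
	     zero, (eq (lshiftrt X (const_int 2)) (const_int 5)) becomes
	     (eq X (const_int 20)).  */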
11169	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11170	      && INTVAL (XEXP (op0, 1)) >= 0
11171	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11172	      && mode_width <= HOST_BITS_PER_WIDE_INT
11173	      && (nonzero_bits (XEXP (op0, 0), mode)
11174		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11175	      && (((unsigned HOST_WIDE_INT) const_op
11176		   + (GET_CODE (op0) != LSHIFTRT
11177		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11178			 + 1)
11179		      : 0))
11180		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11181	    {
11182	      /* If the shift was logical, then we must make the condition
11183		 unsigned.  */
11184	      if (GET_CODE (op0) == LSHIFTRT)
11185		code = unsigned_condition (code);
11186
11187	      const_op <<= INTVAL (XEXP (op0, 1));
11188	      op1 = GEN_INT (const_op);
11189	      op0 = XEXP (op0, 0);
11190	      continue;
11191	    }
11192
11193	  /* If we are using this shift to extract just the sign bit, we
11194	     can replace this with an LT or GE comparison.  */
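	  /* For example, in SImode, (ne (lshiftrt X (const_int 31))
	     (const_int 0)) becomes (lt X (const_int 0)).  */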
11195	  if (const_op == 0
11196	      && (equality_comparison_p || sign_bit_comparison_p)
11197	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11198	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11199		 == mode_width - 1)
11200	    {
11201	      op0 = XEXP (op0, 0);
11202	      code = (code == NE || code == GT ? LT : GE);
11203	      continue;
11204	    }
11205	  break;
11206
11207	default:
11208	  break;
11209	}
11210
11211      break;
11212    }
11213
11214  /* Now make any compound operations involved in this comparison.  Then,
11215     check for an outermost SUBREG on OP0 that is not doing anything or is
11216     paradoxical.  The latter transformation must only be performed when
11217     it is known that the "extra" bits will be the same in op0 and op1 or
11218     that they don't matter.  There are three cases to consider:
11219
11220     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11221     care bits and we can assume they have any convenient value.  So
11222     making the transformation is safe.
11223
11224     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11225     In this case the upper bits of op0 are undefined.  We should not make
11226     the simplification in that case as we do not know the contents of
11227     those bits.
11228
11229     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11230     NIL.  In that case we know those bits are zeros or ones.  We must
11231     also be sure that they are the same as the upper bits of op1.
11232
11233     We can never remove a SUBREG for a non-equality comparison because
11234     the sign bit is in a different place in the underlying object.  */
11235
11236  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11237  op1 = make_compound_operation (op1, SET);
11238
11239  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11240      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11241      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11242      && (code == NE || code == EQ))
11243    {
11244      if (GET_MODE_SIZE (GET_MODE (op0))
11245	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11246	{
11247	  /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
11248	     implemented.  */
11249          if (GET_CODE (SUBREG_REG (op0)) == REG)
11250	    {
11251	      op0 = SUBREG_REG (op0);
11252	      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11253	    }
11254	}
11255      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11256		<= HOST_BITS_PER_WIDE_INT)
11257	       && (nonzero_bits (SUBREG_REG (op0),
11258				 GET_MODE (SUBREG_REG (op0)))
11259		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11260	{
11261	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
11262
11263	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11264	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11265	    op0 = SUBREG_REG (op0), op1 = tem;
11266	}
11267    }
11268
11269  /* We now do the opposite procedure: Some machines don't have compare
11270     insns in all modes.  If OP0's mode is an integer mode smaller than a
11271     word and we can't do a compare in that mode, see if there is a larger
11272     mode for which we can do the compare.  There are a number of cases in
11273     which we can use the wider mode.  */
11274
11275  mode = GET_MODE (op0);
11276  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11277      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11278      && ! have_insn_for (COMPARE, mode))
11279    for (tmode = GET_MODE_WIDER_MODE (mode);
11280	 (tmode != VOIDmode
11281	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11282	 tmode = GET_MODE_WIDER_MODE (tmode))
11283      if (have_insn_for (COMPARE, tmode))
11284	{
11285	  int zero_extended;
11286
11287	  /* If the only nonzero bits in OP0 and OP1 are those in the
11288	     narrower mode and this is an equality or unsigned comparison,
11289	     we can use the wider mode.  Similarly for sign-extended
11290	     values, in which case it is true for all comparisons.  */
11291	  zero_extended = ((code == EQ || code == NE
11292			    || code == GEU || code == GTU
11293			    || code == LEU || code == LTU)
11294			   && (nonzero_bits (op0, tmode)
11295			       & ~GET_MODE_MASK (mode)) == 0
11296			   && ((GET_CODE (op1) == CONST_INT
11297				|| (nonzero_bits (op1, tmode)
11298				    & ~GET_MODE_MASK (mode)) == 0)));
11299
11300	  if (zero_extended
11301	      || ((num_sign_bit_copies (op0, tmode)
11302		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11303				     - GET_MODE_BITSIZE (mode)))
11304		  && (num_sign_bit_copies (op1, tmode)
11305		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11306					- GET_MODE_BITSIZE (mode)))))
11307	    {
11308	      /* If OP0 is an AND and we don't have an AND in MODE either,
11309		 make a new AND in the proper mode.  */
11310	      if (GET_CODE (op0) == AND
11311		  && !have_insn_for (AND, mode))
11312		op0 = gen_binary (AND, tmode,
11313				  gen_lowpart_for_combine (tmode,
11314							   XEXP (op0, 0)),
11315				  gen_lowpart_for_combine (tmode,
11316							   XEXP (op0, 1)));
11317
11318	      op0 = gen_lowpart_for_combine (tmode, op0);
11319	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11320		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11321	      op1 = gen_lowpart_for_combine (tmode, op1);
11322	      break;
11323	    }
11324
11325	  /* If this is a test for negative, we can make an explicit
11326	     test of the sign bit.  */
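	  /* For example, an HImode (lt X (const_int 0)) can be tested in
	     SImode as (ne (and:SI X (const_int 0x8000)) (const_int 0)),
	     with X widened to SImode.  */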
11327
11328	  if (op1 == const0_rtx && (code == LT || code == GE)
11329	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11330	    {
11331	      op0 = gen_binary (AND, tmode,
11332				gen_lowpart_for_combine (tmode, op0),
11333				GEN_INT ((HOST_WIDE_INT) 1
11334					 << (GET_MODE_BITSIZE (mode) - 1)));
11335	      code = (code == LT) ? NE : EQ;
11336	      break;
11337	    }
11338	}
11339
11340#ifdef CANONICALIZE_COMPARISON
11341  /* If this machine only supports a subset of valid comparisons, see if we
11342     can convert an unsupported one into a supported one.  */
11343  CANONICALIZE_COMPARISON (code, op0, op1);
11344#endif
11345
11346  *pop0 = op0;
11347  *pop1 = op1;
11348
11349  return code;
11350}
11351
11352	/* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11353   searching backward.  */
11354static enum rtx_code
11355combine_reversed_comparison_code (rtx exp)
11356{
11357  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11358  rtx x;
11359
11360  if (code1 != UNKNOWN
11361      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11362    return code1;
11363	  /* Otherwise try to find where the condition codes were last set and
11364     use that.  */
11365  x = get_last_value (XEXP (exp, 0));
11366  if (!x || GET_CODE (x) != COMPARE)
11367    return UNKNOWN;
11368  return reversed_comparison_code_parts (GET_CODE (exp),
11369					 XEXP (x, 0), XEXP (x, 1), NULL);
11370}
11371
11372/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11373   Return NULL_RTX in case we fail to do the reversal.  */
11374static rtx
11375reversed_comparison (rtx exp, enum machine_mode mode, rtx op0, rtx op1)
11376{
11377  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11378  if (reversed_code == UNKNOWN)
11379    return NULL_RTX;
11380  else
11381    return gen_binary (reversed_code, mode, op0, op1);
11382}
11383
11384/* Utility function for record_value_for_reg.  Count number of
11385   rtxs in X.  */
11386static int
11387count_rtxs (rtx x)
11388{
11389  enum rtx_code code = GET_CODE (x);
11390  const char *fmt;
11391  int i, ret = 1;
11392
11393  if (GET_RTX_CLASS (code) == '2'
11394      || GET_RTX_CLASS (code) == 'c')
11395    {
11396      rtx x0 = XEXP (x, 0);
11397      rtx x1 = XEXP (x, 1);
11398
11399      if (x0 == x1)
11400	return 1 + 2 * count_rtxs (x0);
11401
11402      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11403	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11404	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11405	return 2 + 2 * count_rtxs (x0)
11406	       + count_rtxs (x0 == XEXP (x1, 0)
11407			     ? XEXP (x1, 1) : XEXP (x1, 0));
11408
11409      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11410	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11411	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11412	return 2 + 2 * count_rtxs (x1)
11413	       + count_rtxs (x1 == XEXP (x0, 0)
11414			     ? XEXP (x0, 1) : XEXP (x0, 0));
11415    }
11416
11417  fmt = GET_RTX_FORMAT (code);
11418  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11419    if (fmt[i] == 'e')
11420      ret += count_rtxs (XEXP (x, i));
11421
11422  return ret;
11423}
11424
11425	/* Utility function for the following routine.  Called when X is part of a
11426	   value being stored into reg_last_set_value.  Sets reg_last_set_table_tick
11427   for each register mentioned.  Similar to mention_regs in cse.c  */
11428
11429static void
11430update_table_tick (rtx x)
11431{
11432  enum rtx_code code = GET_CODE (x);
11433  const char *fmt = GET_RTX_FORMAT (code);
11434  int i;
11435
11436  if (code == REG)
11437    {
11438      unsigned int regno = REGNO (x);
11439      unsigned int endregno
11440	= regno + (regno < FIRST_PSEUDO_REGISTER
11441		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11442      unsigned int r;
11443
11444      for (r = regno; r < endregno; r++)
11445	reg_last_set_table_tick[r] = label_tick;
11446
11447      return;
11448    }
11449
11450  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11451    /* Note that we can't have an "E" in values stored; see
11452       get_last_value_validate.  */
11453    if (fmt[i] == 'e')
11454      {
11455	/* Check for identical subexpressions.  If x contains
11456	   identical subexpressions, we only have to traverse one of
11457	   them.  */
11458	if (i == 0
11459	    && (GET_RTX_CLASS (code) == '2'
11460		|| GET_RTX_CLASS (code) == 'c'))
11461	  {
11462	    /* Note that at this point x1 has already been
11463	       processed.  */
11464	    rtx x0 = XEXP (x, 0);
11465	    rtx x1 = XEXP (x, 1);
11466
11467	    /* If x0 and x1 are identical then there is no need to
11468	       process x0.  */
11469	    if (x0 == x1)
11470	      break;
11471
11472	    /* If x0 is identical to a subexpression of x1 then while
11473	       processing x1, x0 has already been processed.  Thus we
11474	       are done with x.  */
11475	    if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11476		 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11477		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11478	      break;
11479
11480	    /* If x1 is identical to a subexpression of x0 then we
11481	       still have to process the rest of x0.  */
11482	    if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11483		 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11484		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11485	      {
11486		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11487		break;
11488	      }
11489	  }
11490
11491	update_table_tick (XEXP (x, i));
11492      }
11493}
11494
11495/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11496   are saying that the register is clobbered and we no longer know its
11497   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11498   with VALUE also zero and is used to invalidate the register.  */
11499
11500static void
11501record_value_for_reg (rtx reg, rtx insn, rtx value)
11502{
11503  unsigned int regno = REGNO (reg);
11504  unsigned int endregno
11505    = regno + (regno < FIRST_PSEUDO_REGISTER
11506	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11507  unsigned int i;
11508
11509  /* If VALUE contains REG and we have a previous value for REG, substitute
11510     the previous value.  */
11511  if (value && insn && reg_overlap_mentioned_p (reg, value))
11512    {
11513      rtx tem;
11514
11515      /* Set things up so get_last_value is allowed to see anything set up to
11516	 our insn.  */
11517      subst_low_cuid = INSN_CUID (insn);
11518      tem = get_last_value (reg);
11519
11520      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11521	 it isn't going to be useful and will take a lot of time to process,
11522	 so just use the CLOBBER.  */
11523
11524      if (tem)
11525	{
11526	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11527	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11528	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11529	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11530	    tem = XEXP (tem, 0);
11531	  else if (count_occurrences (value, reg, 1) >= 2)
11532	    {
11533	      /* If there are two or more occurrences of REG in VALUE,
11534		 prevent the value from growing too much.  */
11535	      if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11536		tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11537	    }
11538
11539	  value = replace_rtx (copy_rtx (value), reg, tem);
11540	}
11541    }
11542
11543  /* For each register modified, show we don't know its value, that
11544     we don't know about its bitwise content, that its value has been
11545     updated, and that we don't know the location of the death of the
11546     register.  */
11547  for (i = regno; i < endregno; i++)
11548    {
11549      if (insn)
11550	reg_last_set[i] = insn;
11551
11552      reg_last_set_value[i] = 0;
11553      reg_last_set_mode[i] = 0;
11554      reg_last_set_nonzero_bits[i] = 0;
11555      reg_last_set_sign_bit_copies[i] = 0;
11556      reg_last_death[i] = 0;
11557    }
11558
11559  /* Mark registers that are being referenced in this value.  */
11560  if (value)
11561    update_table_tick (value);
11562
11563  /* Now update the status of each register being set.
11564	     If someone is using this register in this block, mark this register
11565	     as invalid, since we would otherwise confuse the two live ranges in
11566	     this basic block; this makes every use of the register invalid.  In
11567	     cse, we scan the table to invalidate all entries using this register,
11568	     but that is too much work for us.  */
11569
11570  for (i = regno; i < endregno; i++)
11571    {
11572      reg_last_set_label[i] = label_tick;
11573      if (value && reg_last_set_table_tick[i] == label_tick)
11574	reg_last_set_invalid[i] = 1;
11575      else
11576	reg_last_set_invalid[i] = 0;
11577    }
11578
11579  /* The value being assigned might refer to X (like in "x++;").  In that
11580     case, we must replace it with (clobber (const_int 0)) to prevent
11581     infinite loops.  */
11582  if (value && ! get_last_value_validate (&value, insn,
11583					  reg_last_set_label[regno], 0))
11584    {
11585      value = copy_rtx (value);
11586      if (! get_last_value_validate (&value, insn,
11587				     reg_last_set_label[regno], 1))
11588	value = 0;
11589    }
11590
11591  /* For the main register being modified, update the value, the mode, the
11592     nonzero bits, and the number of sign bit copies.  */
11593
11594  reg_last_set_value[regno] = value;
11595
11596  if (value)
11597    {
11598      enum machine_mode mode = GET_MODE (reg);
11599      subst_low_cuid = INSN_CUID (insn);
11600      reg_last_set_mode[regno] = mode;
11601      if (GET_MODE_CLASS (mode) == MODE_INT
11602	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11603	mode = nonzero_bits_mode;
11604      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11605      reg_last_set_sign_bit_copies[regno]
11606	= num_sign_bit_copies (value, GET_MODE (reg));
11607    }
11608}
11609
11610/* Called via note_stores from record_dead_and_set_regs to handle one
11611   SET or CLOBBER in an insn.  DATA is the instruction in which the
11612   set is occurring.  */
11613
11614static void
11615record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11616{
11617  rtx record_dead_insn = (rtx) data;
11618
11619  if (GET_CODE (dest) == SUBREG)
11620    dest = SUBREG_REG (dest);
11621
11622  if (GET_CODE (dest) == REG)
11623    {
11624      /* If we are setting the whole register, we know its value.  Otherwise
11625	 show that we don't know the value.  We can handle SUBREG in
11626	 some cases.  */
11627      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11628	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11629      else if (GET_CODE (setter) == SET
11630	       && GET_CODE (SET_DEST (setter)) == SUBREG
11631	       && SUBREG_REG (SET_DEST (setter)) == dest
11632	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11633	       && subreg_lowpart_p (SET_DEST (setter)))
11634	record_value_for_reg (dest, record_dead_insn,
11635			      gen_lowpart_for_combine (GET_MODE (dest),
11636						       SET_SRC (setter)));
11637      else
11638	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11639    }
11640  else if (GET_CODE (dest) == MEM
11641	   /* Ignore pushes, they clobber nothing.  */
11642	   && ! push_operand (dest, GET_MODE (dest)))
11643    mem_last_set = INSN_CUID (record_dead_insn);
11644}
11645
11646/* Update the records of when each REG was most recently set or killed
11647   for the things done by INSN.  This is the last thing done in processing
11648   INSN in the combiner loop.
11649
11650   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11651   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11652   and also the similar information mem_last_set (which insn most recently
11653   modified memory) and last_call_cuid (which insn was the most recent
11654   subroutine call).  */
11655
11656static void
11657record_dead_and_set_regs (rtx insn)
11658{
11659  rtx link;
11660  unsigned int i;
11661
11662  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11663    {
11664      if (REG_NOTE_KIND (link) == REG_DEAD
11665	  && GET_CODE (XEXP (link, 0)) == REG)
11666	{
11667	  unsigned int regno = REGNO (XEXP (link, 0));
11668	  unsigned int endregno
11669	    = regno + (regno < FIRST_PSEUDO_REGISTER
11670		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11671		       : 1);
11672
11673	  for (i = regno; i < endregno; i++)
11674	    reg_last_death[i] = insn;
11675	}
11676      else if (REG_NOTE_KIND (link) == REG_INC)
11677	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11678    }
11679
11680  if (GET_CODE (insn) == CALL_INSN)
11681    {
11682      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11683	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11684	  {
11685	    reg_last_set_value[i] = 0;
11686	    reg_last_set_mode[i] = 0;
11687	    reg_last_set_nonzero_bits[i] = 0;
11688	    reg_last_set_sign_bit_copies[i] = 0;
11689	    reg_last_death[i] = 0;
11690	  }
11691
11692      last_call_cuid = mem_last_set = INSN_CUID (insn);
11693
11694      /* Don't bother recording what this insn does.  It might set the
11695	 return value register, but we can't combine into a call
11696	 pattern anyway, so there's no point trying (and it may cause
11697	 a crash, if e.g. we wind up asking for last_set_value of a
11698	 SUBREG of the return value register).  */
11699      return;
11700    }
11701
11702  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11703}
11704
11705	/* If a SUBREG has the promoted bit set, that is in fact a property of the
11706	   register contained in the SUBREG.  For each such SUBREG, go back and
11707	   adjust the nonzero-bit and sign-bit information of the registers that
11708	   are known to have some zero/sign bits set.
11709
11710   This is needed because when combine blows the SUBREGs away, the
11711   information on zero/sign bits is lost and further combines can be
11712   missed because of that.  */
11713
11714static void
11715record_promoted_value (rtx insn, rtx subreg)
11716{
11717  rtx links, set;
11718  unsigned int regno = REGNO (SUBREG_REG (subreg));
11719  enum machine_mode mode = GET_MODE (subreg);
11720
11721  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11722    return;
11723
11724  for (links = LOG_LINKS (insn); links;)
11725    {
11726      insn = XEXP (links, 0);
11727      set = single_set (insn);
11728
11729      if (! set || GET_CODE (SET_DEST (set)) != REG
11730	  || REGNO (SET_DEST (set)) != regno
11731	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11732	{
11733	  links = XEXP (links, 1);
11734	  continue;
11735	}
11736
11737      if (reg_last_set[regno] == insn)
11738	{
11739	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11740	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11741	}
11742
11743      if (GET_CODE (SET_SRC (set)) == REG)
11744	{
11745	  regno = REGNO (SET_SRC (set));
11746	  links = LOG_LINKS (insn);
11747	}
11748      else
11749	break;
11750    }
11751}
11752
11753/* Scan X for promoted SUBREGs.  For each one found,
11754   note what it implies to the registers used in it.  */
11755
11756static void
11757check_promoted_subreg (rtx insn, rtx x)
11758{
11759  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11760      && GET_CODE (SUBREG_REG (x)) == REG)
11761    record_promoted_value (insn, x);
11762  else
11763    {
11764      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11765      int i, j;
11766
11767      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11768	switch (format[i])
11769	  {
11770	  case 'e':
11771	    check_promoted_subreg (insn, XEXP (x, i));
11772	    break;
11773	  case 'V':
11774	  case 'E':
11775	    if (XVEC (x, i) != 0)
11776	      for (j = 0; j < XVECLEN (x, i); j++)
11777		check_promoted_subreg (insn, XVECEXP (x, i, j));
11778	    break;
11779	  }
11780    }
11781}
11782
11783/* Utility routine for the following function.  Verify that all the registers
11784   mentioned in *LOC are valid when *LOC was part of a value set when
11785   label_tick == TICK.  Return 0 if some are not.
11786
11787   If REPLACE is nonzero, replace the invalid reference with
11788   (clobber (const_int 0)) and return 1.  This replacement is useful because
11789   we often can get useful information about the form of a value (e.g., if
11790   it was produced by a shift that always produces -1 or 0) even though
11791   we don't know exactly what registers it was produced from.  */
11792
11793static int
11794get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11795{
11796  rtx x = *loc;
11797  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11798  int len = GET_RTX_LENGTH (GET_CODE (x));
11799  int i;
11800
11801  if (GET_CODE (x) == REG)
11802    {
11803      unsigned int regno = REGNO (x);
11804      unsigned int endregno
11805	= regno + (regno < FIRST_PSEUDO_REGISTER
11806		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11807      unsigned int j;
11808
11809      for (j = regno; j < endregno; j++)
11810	if (reg_last_set_invalid[j]
11811	    /* If this is a pseudo-register that was only set once and not
11812	       live at the beginning of the function, it is always valid.  */
11813	    || (! (regno >= FIRST_PSEUDO_REGISTER
11814		   && REG_N_SETS (regno) == 1
11815		   && (! REGNO_REG_SET_P
11816		       (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11817		&& reg_last_set_label[j] > tick))
11818	  {
11819	    if (replace)
11820	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11821	    return replace;
11822	  }
11823
11824      return 1;
11825    }
11826  /* If this is a memory reference, make sure that there were
11827     no stores after it that might have clobbered the value.  We don't
11828     have alias info, so we assume any store invalidates it.  */
11829  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11830	   && INSN_CUID (insn) <= mem_last_set)
11831    {
11832      if (replace)
11833	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11834      return replace;
11835    }
11836
11837  for (i = 0; i < len; i++)
11838    {
11839      if (fmt[i] == 'e')
11840	{
11841	  /* Check for identical subexpressions.  If x contains
11842	     identical subexpressions, we only have to traverse one of
11843	     them.  */
11844	  if (i == 1
11845	      && (GET_RTX_CLASS (GET_CODE (x)) == '2'
11846		  || GET_RTX_CLASS (GET_CODE (x)) == 'c'))
11847	    {
11848	      /* Note that at this point x0 has already been checked
11849		 and found valid.  */
11850	      rtx x0 = XEXP (x, 0);
11851	      rtx x1 = XEXP (x, 1);
11852
11853	      /* If x0 and x1 are identical then x is also valid.  */
11854	      if (x0 == x1)
11855		return 1;
11856
11857	      /* If x1 is identical to a subexpression of x0 then
11858		 while checking x0, x1 has already been checked.  Thus
11859		 it is valid and so is x.  */
11860	      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11861		   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11862		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11863		return 1;
11864
11865	      /* If x0 is identical to a subexpression of x1 then x is
11866		 valid iff the rest of x1 is valid.  */
11867	      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11868		   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11869		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11870		return
11871		  get_last_value_validate (&XEXP (x1,
11872						  x0 == XEXP (x1, 0) ? 1 : 0),
11873					   insn, tick, replace);
11874	    }
11875
11876	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11877				       replace) == 0)
11878	    return 0;
11879	}
11880      /* Don't bother with these.  They shouldn't occur anyway.  */
11881      else if (fmt[i] == 'E')
11882	return 0;
11883    }
11884
11885  /* If we haven't found a reason for it to be invalid, it is valid.  */
11886  return 1;
11887}
11888
11889/* Get the last value assigned to X, if known.  Some registers
11890   in the value may be replaced with (clobber (const_int 0)) if their value
11891   is no longer known reliably.  */
11892
11893static rtx
11894get_last_value (rtx x)
11895{
11896  unsigned int regno;
11897  rtx value;
11898
11899  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11900     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11901     we cannot predict what values the "extra" bits might have.  */
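  /* E.g. (a made-up case): for (subreg:QI (reg:SI 65) 0) whose inner
     reg has last value (const_int 300), the lowpart conversion below
     yields (const_int 44), i.e. the low byte of 300.  */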
11902  if (GET_CODE (x) == SUBREG
11903      && subreg_lowpart_p (x)
11904      && (GET_MODE_SIZE (GET_MODE (x))
11905	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11906      && (value = get_last_value (SUBREG_REG (x))) != 0)
11907    return gen_lowpart_for_combine (GET_MODE (x), value);
11908
11909  if (GET_CODE (x) != REG)
11910    return 0;
11911
11912  regno = REGNO (x);
11913  value = reg_last_set_value[regno];
11914
11915  /* If we don't have a value, or if it isn't for this basic block and
11916     it's either a hard register, set more than once, or live
11917     at the beginning of the function, return 0.
11918
11919     This is because, if it's not live at the beginning of the function, then
11920     the reg is always set before being used (never used without being set).
11921     And, if it's set only once, and it's always set before use, then all
11922     uses must have the same last value, even if it's not from this basic
11923     block.  */
11924
11925  if (value == 0
11926      || (reg_last_set_label[regno] != label_tick
11927	  && (regno < FIRST_PSEUDO_REGISTER
11928	      || REG_N_SETS (regno) != 1
11929	      || (REGNO_REG_SET_P
11930		  (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
11931    return 0;
11932
11933  /* If the value was set in a later insn than the ones we are processing,
11934     we can't use it even if the register was only set once.  */
11935  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11936    return 0;
11937
11938  /* If the value has all its registers valid, return it.  */
11939  if (get_last_value_validate (&value, reg_last_set[regno],
11940			       reg_last_set_label[regno], 0))
11941    return value;
11942
11943  /* Otherwise, make a copy and replace any invalid register with
11944     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11945
11946  value = copy_rtx (value);
11947  if (get_last_value_validate (&value, reg_last_set[regno],
11948			       reg_last_set_label[regno], 1))
11949    return value;
11950
11951  return 0;
11952}
11953
11954/* Return nonzero if expression X refers to a REG or to memory
11955   that is set in an instruction more recent than FROM_CUID.  */
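
/* For example (hypothetical RTL): it would be unsafe to substitute the
   expression (plus:SI (reg:SI 60) (const_int 4)) into an insn that
   lies after

	(set (reg:SI 60) (const_int 0))

   because reg 60 no longer holds the value the expression was computed
   from; this is the situation the check below detects.  */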
11956
11957static int
11958use_crosses_set_p (rtx x, int from_cuid)
11959{
11960  const char *fmt;
11961  int i;
11962  enum rtx_code code = GET_CODE (x);
11963
11964  if (code == REG)
11965    {
11966      unsigned int regno = REGNO (x);
11967      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11968				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11969
11970#ifdef PUSH_ROUNDING
11971      /* Don't allow uses of the stack pointer to be moved,
11972	 because we don't know whether the move crosses a push insn.  */
11973      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11974	return 1;
11975#endif
11976      for (; regno < endreg; regno++)
11977	if (reg_last_set[regno]
11978	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
11979	  return 1;
11980      return 0;
11981    }
11982
11983  if (code == MEM && mem_last_set > from_cuid)
11984    return 1;
11985
11986  fmt = GET_RTX_FORMAT (code);
11987
11988  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11989    {
11990      if (fmt[i] == 'E')
11991	{
11992	  int j;
11993	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11994	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11995	      return 1;
11996	}
11997      else if (fmt[i] == 'e'
11998	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11999	return 1;
12000    }
12001  return 0;
12002}
12003
12004/* Define three variables used for communication between the following
12005   routines.  */
12006
12007static unsigned int reg_dead_regno, reg_dead_endregno;
12008static int reg_dead_flag;
12009
12010/* Function called via note_stores from reg_dead_at_p.
12011
12012   If DEST is within [reg_dead_regno, reg_dead_endregno), set
12013   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
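
/* note_stores calls this once for each SET or CLOBBER destination in a
   pattern, so for the (made-up) pattern

	(parallel [(set (reg:SI 1) (reg:SI 3))
		   (clobber (reg:SI 2))])

   it is invoked with DEST = (reg:SI 1) and then DEST = (reg:SI 2).  */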
12014
12015static void
12016reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
12017{
12018  unsigned int regno, endregno;
12019
12020  if (GET_CODE (dest) != REG)
12021    return;
12022
12023  regno = REGNO (dest);
12024  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
12025		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
12026
12027  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12028    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12029}
12030
12031/* Return nonzero if REG is known to be dead at INSN.
12032
12033   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12034   referencing REG, it is dead.  If we hit a SET referencing REG, it is
12035   live.  Otherwise, see if it is live or dead at the start of the basic
12036   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12037   must be assumed to be always live.  */
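
/* E.g. (schematically): asking whether (reg:SI 1) is dead at INSN, we
   walk backwards; a (clobber (reg:SI 1)) or a REG_DEAD note answers
   "dead", a (set (reg:SI 1) ...) answers "live", and reaching the
   start of the block defers to global_live_at_start.  */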
12038
12039static int
12040reg_dead_at_p (rtx reg, rtx insn)
12041{
12042  basic_block block;
12043  unsigned int i;
12044
12045  /* Set variables for reg_dead_at_p_1.  */
12046  reg_dead_regno = REGNO (reg);
12047  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
12048					? HARD_REGNO_NREGS (reg_dead_regno,
12049							    GET_MODE (reg))
12050					: 1);
12051
12052  reg_dead_flag = 0;
12053
12054  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
12055  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12056    {
12057      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12058	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
12059	  return 0;
12060    }
12061
12062  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
12063     beginning of function.  */
12064  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
12065       insn = prev_nonnote_insn (insn))
12066    {
12067      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12068      if (reg_dead_flag)
12069	return reg_dead_flag == 1 ? 1 : 0;
12070
12071      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12072	return 1;
12073    }
12074
12075  /* Get the basic block that we were in.  */
12076  if (insn == 0)
12077    block = ENTRY_BLOCK_PTR->next_bb;
12078  else
12079    {
12080      FOR_EACH_BB (block)
12081	if (insn == BB_HEAD (block))
12082	  break;
12083
12084      if (block == EXIT_BLOCK_PTR)
12085	return 0;
12086    }
12087
12088  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12089    if (REGNO_REG_SET_P (block->global_live_at_start, i))
12090      return 0;
12091
12092  return 1;
12093}
12094
12095/* Note hard registers in X that are used.  This code is similar to
12096   that in flow.c, but much simpler since we don't care about pseudos.  */
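
/* For instance (assuming a 32-bit target where DImode occupies two
   hard registers): scanning the made-up pattern

	(set (reg:SI 61) (subreg:SI (reg:DI 0) 0))

   sets the bits for hard regs 0 and 1 in newpat_used_regs, while the
   pseudo 61 is ignored.  */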
12097
12098static void
12099mark_used_regs_combine (rtx x)
12100{
12101  RTX_CODE code = GET_CODE (x);
12102  unsigned int regno;
12103  int i;
12104
12105  switch (code)
12106    {
12107    case LABEL_REF:
12108    case SYMBOL_REF:
12109    case CONST_INT:
12110    case CONST:
12111    case CONST_DOUBLE:
12112    case CONST_VECTOR:
12113    case PC:
12114    case ADDR_VEC:
12115    case ADDR_DIFF_VEC:
12116    case ASM_INPUT:
12117#ifdef HAVE_cc0
12118    /* CC0 must die in the insn after it is set, so we don't need to take
12119       special note of it here.  */
12120    case CC0:
12121#endif
12122      return;
12123
12124    case CLOBBER:
12125      /* If we are clobbering a MEM, mark any hard registers inside the
12126	 address as used.  */
12127      if (GET_CODE (XEXP (x, 0)) == MEM)
12128	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12129      return;
12130
12131    case REG:
12132      regno = REGNO (x);
12133      /* A hard reg in a wide mode may really be multiple registers.
12134	 If so, mark all of them just like the first.  */
12135      if (regno < FIRST_PSEUDO_REGISTER)
12136	{
12137	  unsigned int endregno, r;
12138
12139	  /* None of this applies to the stack, frame or arg pointers.  */
12140	  if (regno == STACK_POINTER_REGNUM
12141#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12142	      || regno == HARD_FRAME_POINTER_REGNUM
12143#endif
12144#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12145	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12146#endif
12147	      || regno == FRAME_POINTER_REGNUM)
12148	    return;
12149
12150	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12151	  for (r = regno; r < endregno; r++)
12152	    SET_HARD_REG_BIT (newpat_used_regs, r);
12153	}
12154      return;
12155
12156    case SET:
12157      {
12158	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12159	   the address.  */
12160	rtx testreg = SET_DEST (x);
12161
12162	while (GET_CODE (testreg) == SUBREG
12163	       || GET_CODE (testreg) == ZERO_EXTRACT
12164	       || GET_CODE (testreg) == SIGN_EXTRACT
12165	       || GET_CODE (testreg) == STRICT_LOW_PART)
12166	  testreg = XEXP (testreg, 0);
12167
12168	if (GET_CODE (testreg) == MEM)
12169	  mark_used_regs_combine (XEXP (testreg, 0));
12170
12171	mark_used_regs_combine (SET_SRC (x));
12172      }
12173      return;
12174
12175    default:
12176      break;
12177    }
12178
12179  /* Recursively scan the operands of this expression.  */
12180
12181  {
12182    const char *fmt = GET_RTX_FORMAT (code);
12183
12184    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12185      {
12186	if (fmt[i] == 'e')
12187	  mark_used_regs_combine (XEXP (x, i));
12188	else if (fmt[i] == 'E')
12189	  {
12190	    int j;
12191
12192	    for (j = 0; j < XVECLEN (x, i); j++)
12193	      mark_used_regs_combine (XVECEXP (x, i, j));
12194	  }
12195      }
12196  }
12197}
12198
12199/* Remove register number REGNO from the dead registers list of INSN.
12200
12201   Return the note used to record the death, if there was one.  */
12202
12203rtx
12204remove_death (unsigned int regno, rtx insn)
12205{
12206  rtx note = find_regno_note (insn, REG_DEAD, regno);
12207
12208  if (note)
12209    {
12210      REG_N_DEATHS (regno)--;
12211      remove_note (insn, note);
12212    }
12213
12214  return note;
12215}
12216
12217/* For each register (hardware or pseudo) used within expression X, if its
12218   death is in an instruction with cuid between FROM_CUID (inclusive) and
12219   the cuid of TO_INSN (exclusive), put a REG_DEAD note for that register in the
12220   list headed by PNOTES.
12221
12222   That said, don't move registers killed by maybe_kill_insn.
12223
12224   This is done when X is being merged by combination into TO_INSN.  These
12225   notes will then be distributed as needed.  */
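
/* As a concrete (made-up) sketch: if X still uses reg 60 and reg 60's
   recorded death is a REG_DEAD note on an insn between FROM_CUID and
   TO_INSN, that note is removed from its old insn and queued on
   *PNOTES; distribute_notes will later re-place it, typically on
   TO_INSN where reg 60 now dies.  */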
12226
12227static void
12228move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
12229	     rtx *pnotes)
12230{
12231  const char *fmt;
12232  int len, i;
12233  enum rtx_code code = GET_CODE (x);
12234
12235  if (code == REG)
12236    {
12237      unsigned int regno = REGNO (x);
12238      rtx where_dead = reg_last_death[regno];
12239      rtx before_dead, after_dead;
12240
12241      /* Don't move the register if it gets killed in between from and to.  */
12242      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12243	  && ! reg_referenced_p (x, maybe_kill_insn))
12244	return;
12245
12246      /* WHERE_DEAD could be a USE insn made by combine, so first we
12247	 make sure that we have insns with valid INSN_CUID values.  */
12248      before_dead = where_dead;
12249      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
12250	before_dead = PREV_INSN (before_dead);
12251
12252      after_dead = where_dead;
12253      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
12254	after_dead = NEXT_INSN (after_dead);
12255
12256      if (before_dead && after_dead
12257	  && INSN_CUID (before_dead) >= from_cuid
12258	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
12259	      || (where_dead != after_dead
12260		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
12261	{
12262	  rtx note = remove_death (regno, where_dead);
12263
12264	  /* It is possible for the call above to return 0.  This can occur
12265	     when reg_last_death points to I2 or I1 that we combined with.
12266	     In that case make a new note.
12267
12268	     We must also check for the case where X is a hard register
12269	     and NOTE is a death note for a range of hard registers
12270	     including X.  In that case, we must put REG_DEAD notes for
12271	     the remaining registers in place of NOTE.  */
12272
12273	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12274	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12275		  > GET_MODE_SIZE (GET_MODE (x))))
12276	    {
12277	      unsigned int deadregno = REGNO (XEXP (note, 0));
12278	      unsigned int deadend
12279		= (deadregno + HARD_REGNO_NREGS (deadregno,
12280						 GET_MODE (XEXP (note, 0))));
12281	      unsigned int ourend
12282		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12283	      unsigned int i;
12284
12285	      for (i = deadregno; i < deadend; i++)
12286		if (i < regno || i >= ourend)
12287		  REG_NOTES (where_dead)
12288		    = gen_rtx_EXPR_LIST (REG_DEAD,
12289					 regno_reg_rtx[i],
12290					 REG_NOTES (where_dead));
12291	    }
12292
12293	  /* If we didn't find any note, or if we found a REG_DEAD note that
12294	     covers only part of the given reg, and we have a multi-reg hard
12295	     register, then to be safe we must check for REG_DEAD notes
12296	     for each register other than the first.  They could have
12297	     their own REG_DEAD notes lying around.  */
12298	  else if ((note == 0
12299		    || (note != 0
12300			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12301			    < GET_MODE_SIZE (GET_MODE (x)))))
12302		   && regno < FIRST_PSEUDO_REGISTER
12303		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
12304	    {
12305	      unsigned int ourend
12306		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12307	      unsigned int i, offset;
12308	      rtx oldnotes = 0;
12309
12310	      if (note)
12311		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
12312	      else
12313		offset = 1;
12314
12315	      for (i = regno + offset; i < ourend; i++)
12316		move_deaths (regno_reg_rtx[i],
12317			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
12318	    }
12319
12320	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12321	    {
12322	      XEXP (note, 1) = *pnotes;
12323	      *pnotes = note;
12324	    }
12325	  else
12326	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
12327
12328	  REG_N_DEATHS (regno)++;
12329	}
12330
12331      return;
12332    }
12333
12334  else if (GET_CODE (x) == SET)
12335    {
12336      rtx dest = SET_DEST (x);
12337
12338      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12339
12340      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12341	 that accesses one word of a multi-word item, some
12342	 piece of every register in the expression is used by
12343	 this insn, so remove any old death.  */
12344      /* ??? So why do we test for equality of the sizes?  */
12345
12346      if (GET_CODE (dest) == ZERO_EXTRACT
12347	  || GET_CODE (dest) == STRICT_LOW_PART
12348	  || (GET_CODE (dest) == SUBREG
12349	      && (((GET_MODE_SIZE (GET_MODE (dest))
12350		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12351		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12352		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12353	{
12354	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12355	  return;
12356	}
12357
12358      /* If this is some other SUBREG, we know it replaces the entire
12359	 value, so use that as the destination.  */
12360      if (GET_CODE (dest) == SUBREG)
12361	dest = SUBREG_REG (dest);
12362
12363      /* If this is a MEM, adjust deaths of anything used in the address.
12364	 For a REG (the only other possibility), the entire value is
12365	 being replaced so the old value is not used in this insn.  */
12366
12367      if (GET_CODE (dest) == MEM)
12368	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12369		     to_insn, pnotes);
12370      return;
12371    }
12372
12373  else if (GET_CODE (x) == CLOBBER)
12374    return;
12375
12376  len = GET_RTX_LENGTH (code);
12377  fmt = GET_RTX_FORMAT (code);
12378
12379  for (i = 0; i < len; i++)
12380    {
12381      if (fmt[i] == 'E')
12382	{
12383	  int j;
12384	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12385	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12386			 to_insn, pnotes);
12387	}
12388      else if (fmt[i] == 'e')
12389	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12390    }
12391}
12392
12393/* Return 1 if X is the target of a bit-field assignment in BODY, the
12394   pattern of an insn.  X must be a REG.  */
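
/* E.g. (hypothetical): for the body

	(set (zero_extract:SI (reg:SI 60) (const_int 8) (const_int 0))
	     (reg:SI 61))

   this returns 1 when X is (reg:SI 60), since only a bit-field of it
   is assigned, and 0 when X is (reg:SI 61).  */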
12395
12396static int
12397reg_bitfield_target_p (rtx x, rtx body)
12398{
12399  int i;
12400
12401  if (GET_CODE (body) == SET)
12402    {
12403      rtx dest = SET_DEST (body);
12404      rtx target;
12405      unsigned int regno, tregno, endregno, endtregno;
12406
12407      if (GET_CODE (dest) == ZERO_EXTRACT)
12408	target = XEXP (dest, 0);
12409      else if (GET_CODE (dest) == STRICT_LOW_PART)
12410	target = SUBREG_REG (XEXP (dest, 0));
12411      else
12412	return 0;
12413
12414      if (GET_CODE (target) == SUBREG)
12415	target = SUBREG_REG (target);
12416
12417      if (GET_CODE (target) != REG)
12418	return 0;
12419
12420      tregno = REGNO (target), regno = REGNO (x);
12421      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12422	return target == x;
12423
12424      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12425      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12426
12427      return endregno > tregno && regno < endtregno;
12428    }
12429
12430  else if (GET_CODE (body) == PARALLEL)
12431    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12432      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12433	return 1;
12434
12435  return 0;
12436}
12437
12438/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12439   as appropriate.  I3 and I2 are the insns resulting from the combination
12440   insns including FROM (I2 may be zero).
12441
12442   Each note in the list is either ignored or placed on some insns, depending
12443   on the type of note.  */
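
/* As a (made-up) example of the REG_DEAD case below: when

	I2: (set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4)))
	I3: (set (reg:SI 62) (mem:SI (reg:SI 60)))

   are combined into a single I3, a REG_DEAD note for reg 61 that was
   on I2 must be re-placed on the new I3, where reg 61 now dies.  */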
12444
12445static void
12446distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
12447{
12448  rtx note, next_note;
12449  rtx tem;
12450
12451  for (note = notes; note; note = next_note)
12452    {
12453      rtx place = 0, place2 = 0;
12454
12455      /* If this NOTE references a pseudo register, ensure it references
12456	 the latest copy of that register.  */
12457      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12458	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12459	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12460
12461      next_note = XEXP (note, 1);
12462      switch (REG_NOTE_KIND (note))
12463	{
12464	case REG_BR_PROB:
12465	case REG_BR_PRED:
12466	  /* Doesn't matter much where we put this, as long as it's somewhere.
12467	     It is preferable to keep these notes on branches, which is most
12468	     likely to be i3.  */
12469	  place = i3;
12470	  break;
12471
12472	case REG_VALUE_PROFILE:
12473	  /* Just get rid of this note, as it is unused later anyway.  */
12474	  break;
12475
12476	case REG_VTABLE_REF:
12477	  /* ??? Should remain with *a particular* memory load.  Given the
12478	     nature of vtable data, the last insn seems relatively safe.  */
12479	  place = i3;
12480	  break;
12481
12482	case REG_NON_LOCAL_GOTO:
12483	  if (GET_CODE (i3) == JUMP_INSN)
12484	    place = i3;
12485	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
12486	    place = i2;
12487	  else
12488	    abort ();
12489	  break;
12490
12491	case REG_EH_REGION:
12492	  /* These notes must remain with the call or trapping instruction.  */
12493	  if (GET_CODE (i3) == CALL_INSN)
12494	    place = i3;
12495	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12496	    place = i2;
12497	  else if (flag_non_call_exceptions)
12498	    {
12499	      if (may_trap_p (i3))
12500		place = i3;
12501	      else if (i2 && may_trap_p (i2))
12502		place = i2;
12503	      /* ??? Otherwise assume we've combined things such that we
12504		 can now prove that the instructions can't trap.  Drop the
12505		 note in this case.  */
12506	    }
12507	  else
12508	    abort ();
12509	  break;
12510
12511	case REG_ALWAYS_RETURN:
12512	case REG_NORETURN:
12513	case REG_SETJMP:
12514	  /* These notes must remain with the call.  It should not be
12515	     possible for both I2 and I3 to be a call.  */
12516	  if (GET_CODE (i3) == CALL_INSN)
12517	    place = i3;
12518	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12519	    place = i2;
12520	  else
12521	    abort ();
12522	  break;
12523
12524	case REG_UNUSED:
12525	  /* Any clobbers for i3 may still exist, and so we must process
12526	     REG_UNUSED notes from that insn.
12527
12528	     Any clobbers from i2 or i1 can only exist if they were added by
12529	     recog_for_combine.  In that case, recog_for_combine created the
12530	     necessary REG_UNUSED notes.  Trying to keep any original
12531	     REG_UNUSED notes from these insns can cause incorrect output
12532	     if it is for the same register as the original i3 dest.
12533	     In that case, we will notice that the register is set in i3,
12534	     and then add a REG_UNUSED note for the destination of i3, which
12535	     is wrong.  However, it is possible to have REG_UNUSED notes from
12536	     i2 or i1 for registers which were both used and clobbered, so
12537	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12538	     notes.  */
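
	  /* Example (made-up): if I3 is
	     (parallel [(set (reg:SI 60) (reg:SI 61))
			(clobber (reg:CC 17))])
	     and the flags value is never used, the REG_UNUSED note for
	     reg 17 stays on I3.  */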
12539
12540	  /* If this register is set or clobbered in I3, put the note there
12541	     unless there is one already.  */
12542	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12543	    {
12544	      if (from_insn != i3)
12545		break;
12546
12547	      if (! (GET_CODE (XEXP (note, 0)) == REG
12548		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12549		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12550		place = i3;
12551	    }
12552	  /* Otherwise, if this register is used by I3, then this register
12553	     now dies here, so we must put a REG_DEAD note here unless there
12554	     is one already.  */
12555	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12556		   && ! (GET_CODE (XEXP (note, 0)) == REG
12557			 ? find_regno_note (i3, REG_DEAD,
12558					    REGNO (XEXP (note, 0)))
12559			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12560	    {
12561	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12562	      place = i3;
12563	    }
12564	  break;
12565
12566	case REG_EQUAL:
12567	case REG_EQUIV:
12568	case REG_NOALIAS:
12569	  /* These notes say something about results of an insn.  We can
12570	     only support them if they used to be on I3 in which case they
12571	     remain on I3.  Otherwise they are ignored.
12572
12573	     If the note refers to an expression that is not a constant, we
12574	     must also ignore the note since we cannot tell whether the
12575	     equivalence is still true.  It might be possible to do
12576	     slightly better than this (we only have a problem if I2DEST
12577	     or I1DEST is present in the expression), but it doesn't
12578	     seem worth the trouble.  */
12579
12580	  if (from_insn == i3
12581	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12582	    place = i3;
12583	  break;
12584
12585	case REG_INC:
12586	case REG_NO_CONFLICT:
12587	  /* These notes say something about how a register is used.  They must
12588	     be present on any use of the register in I2 or I3.  */
12589	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12590	    place = i3;
12591
12592	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12593	    {
12594	      if (place)
12595		place2 = i2;
12596	      else
12597		place = i2;
12598	    }
12599	  break;
12600
12601	case REG_LABEL:
12602	  /* This can show up in several ways -- either directly in the
12603	     pattern, or hidden off in the constant pool with (or without?)
12604	     a REG_EQUAL note.  */
12605	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12606	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12607	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12608		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12609		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12610	    place = i3;
12611
12612	  if (i2
12613	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12614		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12615		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12616		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12617	    {
12618	      if (place)
12619		place2 = i2;
12620	      else
12621		place = i2;
12622	    }
12623
12624	  /* Don't attach REG_LABEL note to a JUMP_INSN which has
12625	     JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
12626	  if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12627	    {
12628	      if (JUMP_LABEL (place) != XEXP (note, 0))
12629		abort ();
12630	      if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12631		LABEL_NUSES (JUMP_LABEL (place))--;
12632	      place = 0;
12633	    }
12634	  if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12635	    {
12636	      if (JUMP_LABEL (place2) != XEXP (note, 0))
12637		abort ();
12638	      if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12639		LABEL_NUSES (JUMP_LABEL (place2))--;
12640	      place2 = 0;
12641	    }
12642	  break;
12643
12644	case REG_NONNEG:
12645	  /* This note says something about the value of a register prior
12646	     to the execution of an insn.  It is too much trouble to see
12647	     if the note is still correct in all situations.  It is better
12648	     to simply delete it.  */
12649	  break;
12650
12651	case REG_RETVAL:
12652	  /* If the insn previously containing this note still exists,
12653	     put it back where it was.  Otherwise move it to the previous
12654	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12655	  if (GET_CODE (from_insn) != NOTE)
12656	    place = from_insn;
12657	  else
12658	    {
12659	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12660	      place = prev_real_insn (from_insn);
12661	      if (tem && place)
12662		XEXP (tem, 0) = place;
12663	      /* If we're deleting the last remaining instruction of a
12664		 libcall sequence, don't add the notes.  */
12665	      else if (XEXP (note, 0) == from_insn)
12666		tem = place = 0;
12667	      /* Don't add the dangling REG_RETVAL note.  */
12668	      else if (! tem)
12669		place = 0;
12670	    }
12671	  break;
12672
12673	case REG_LIBCALL:
12674	  /* This is handled similarly to REG_RETVAL.  */
12675	  if (GET_CODE (from_insn) != NOTE)
12676	    place = from_insn;
12677	  else
12678	    {
12679	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12680	      place = next_real_insn (from_insn);
12681	      if (tem && place)
12682		XEXP (tem, 0) = place;
12683	      /* If we're deleting the last remaining instruction of a
12684		 libcall sequence, don't add the notes.  */
12685	      else if (XEXP (note, 0) == from_insn)
12686		tem = place = 0;
12687	      /* Don't add the dangling REG_LIBCALL note.  */
12688	      else if (! tem)
12689		place = 0;
12690	    }
12691	  break;
12692
12693	case REG_DEAD:
12694	  /* If the register is used as an input in I3, it dies there.
12695	     Similarly for I2, if it is nonzero and adjacent to I3.
12696
12697	     If the register is not used as an input in either I3 or I2
12698	     and it is not one of the registers we were supposed to eliminate,
12699	     there are two possibilities.  We might have a non-adjacent I2
12700	     or we might have somehow eliminated an additional register
12701	     from a computation.  For example, we might have had A & B where
12702	     we discover that B will always be zero.  In this case we will
12703	     eliminate the reference to A.
12704
12705	     In both cases, we must search to see if we can find a previous
12706	     use of A and put the death note there.  */
12707
12708	  if (from_insn
12709	      && GET_CODE (from_insn) == CALL_INSN
12710	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12711	    place = from_insn;
12712	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12713	    place = i3;
12714	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
12715		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12716	    place = i2;
12717
12718	  if (place == 0)
12719	    {
12720	      basic_block bb = this_basic_block;
12721
12722	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12723		{
12724		  if (! INSN_P (tem))
12725		    {
12726		      if (tem == BB_HEAD (bb))
12727			break;
12728		      continue;
12729		    }
12730
12731		  /* If the register is being set at TEM, see if that is all
12732		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12733		     into a REG_UNUSED note instead.  Don't delete sets to
12734		     global register vars.  */
12735		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
12736		       || !global_regs[REGNO (XEXP (note, 0))])
12737		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
12738		    {
12739		      rtx set = single_set (tem);
12740		      rtx inner_dest = 0;
12741#ifdef HAVE_cc0
12742		      rtx cc0_setter = NULL_RTX;
12743#endif
12744
12745		      if (set != 0)
12746			for (inner_dest = SET_DEST (set);
12747			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12748			      || GET_CODE (inner_dest) == SUBREG
12749			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12750			     inner_dest = XEXP (inner_dest, 0))
12751			  ;
12752
12753		      /* Verify that it was the set, and not a clobber that
12754			 modified the register.
12755
12756			 CC0 targets must be careful to maintain setter/user
12757			 pairs.  If we cannot delete the setter due to side
12758			 effects, mark the user with an UNUSED note instead
12759			 of deleting it.  */
12760
12761		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12762			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12763#ifdef HAVE_cc0
12764			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12765			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12766				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12767#endif
12768			  )
12769			{
12770			  /* Move the notes and links of TEM elsewhere.
12771			     This might delete other dead insns recursively.
12772			     First set the pattern to something that won't use
12773			     any register.  */
12774			  rtx old_notes = REG_NOTES (tem);
12775
12776			  PATTERN (tem) = pc_rtx;
12777			  REG_NOTES (tem) = NULL;
12778
12779			  distribute_notes (old_notes, tem, tem, NULL_RTX);
12780			  distribute_links (LOG_LINKS (tem));
12781
12782			  PUT_CODE (tem, NOTE);
12783			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12784			  NOTE_SOURCE_FILE (tem) = 0;
12785
12786#ifdef HAVE_cc0
12787			  /* Delete the setter too.  */
12788			  if (cc0_setter)
12789			    {
12790			      PATTERN (cc0_setter) = pc_rtx;
12791			      old_notes = REG_NOTES (cc0_setter);
12792			      REG_NOTES (cc0_setter) = NULL;
12793
12794			      distribute_notes (old_notes, cc0_setter,
12795						cc0_setter, NULL_RTX);
12796			      distribute_links (LOG_LINKS (cc0_setter));
12797
12798			      PUT_CODE (cc0_setter, NOTE);
12799			      NOTE_LINE_NUMBER (cc0_setter)
12800				= NOTE_INSN_DELETED;
12801			      NOTE_SOURCE_FILE (cc0_setter) = 0;
12802			    }
12803#endif
12804			}
12805		      /* If the register is both set and used here, put the
12806			 REG_DEAD note here, but place a REG_UNUSED note
12807			 here too unless there already is one.  */
12808		      else if (reg_referenced_p (XEXP (note, 0),
12809						 PATTERN (tem)))
12810			{
12811			  place = tem;
12812
12813			  if (! find_regno_note (tem, REG_UNUSED,
12814						 REGNO (XEXP (note, 0))))
12815			    REG_NOTES (tem)
12816			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12817						   REG_NOTES (tem));
12818			}
12819		      else
12820			{
12821			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12822
12823			  /* If there isn't already a REG_UNUSED note, put one
12824			     here.  */
12825			  if (! find_regno_note (tem, REG_UNUSED,
12826						 REGNO (XEXP (note, 0))))
12827			    place = tem;
12828			  break;
12829			}
12830		    }
12831		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12832			   || (GET_CODE (tem) == CALL_INSN
12833			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12834		    {
12835		      place = tem;
12836
12837		      /* If we are doing a 3->2 combination, and we have a
12838			 register which formerly died in i3 and was not used
12839			 by i2, which now no longer dies in i3 and is used in
12840			 i2 but does not die in i2, and place is between i2
12841			 and i3, then we may need to move a link from place to
12842			 i2.  */
12843		      if (i2 && INSN_UID (place) <= max_uid_cuid
12844			  && INSN_CUID (place) > INSN_CUID (i2)
12845			  && from_insn
12846			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12847			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12848			{
12849			  rtx links = LOG_LINKS (place);
12850			  LOG_LINKS (place) = 0;
12851			  distribute_links (links);
12852			}
12853		      break;
12854		    }
12855
12856		  if (tem == BB_HEAD (bb))
12857		    break;
12858		}
12859
12860	      /* We haven't found an insn for the death note and it
12861		 is still a REG_DEAD note, but we have hit the beginning
12862		 of the block.  If the existing life info says the reg
12863		 was dead, there's nothing left to do.  Otherwise, we'll
12864		 need to do a global life update after combine.  */
12865	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12866		  && REGNO_REG_SET_P (bb->global_live_at_start,
12867				      REGNO (XEXP (note, 0))))
12868		SET_BIT (refresh_blocks, this_basic_block->index);
12869	    }
12870
12871	  /* If the register is set or already dead at PLACE, we needn't do
12872	     anything with this note if it is still a REG_DEAD note.
12873	     We get here if it is set at all, not only if it is totally
12874	     replaced, which is what `dead_or_set_p' checks, so we also
12875	     check for it being set partially.  */
12876
12877	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12878	    {
12879	      unsigned int regno = REGNO (XEXP (note, 0));
12880
12881	      /* Similarly, if the instruction on which we want to place
12882		 the note is a noop, we'll need to do a global live update
12883		 after the noop moves are removed in delete_noop_moves.  */
12884	      if (noop_move_p (place))
12885		SET_BIT (refresh_blocks, this_basic_block->index);
12886
12887	      if (dead_or_set_p (place, XEXP (note, 0))
12888		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12889		{
12890		  /* Unless the register previously died in PLACE, clear
12891		     reg_last_death.  [I no longer understand why this is
12892		     being done.] */
12893		  if (reg_last_death[regno] != place)
12894		    reg_last_death[regno] = 0;
12895		  place = 0;
12896		}
12897	      else
12898		reg_last_death[regno] = place;
12899
12900	      /* If this is a death note for a hard reg that is occupying
12901		 multiple registers, ensure that we are still using all
12902		 parts of the object.  If we find a piece of the object
12903		 that is unused, we must arrange for an appropriate REG_DEAD
12904		 note to be added for it.  However, we can't just emit a USE
12905		 and tag the note to it, since the register might actually
12906		 be dead; so we recurse, and the recursive call then finds
12907		 the previous insn that used this register.  */
12908
12909	      if (place && regno < FIRST_PSEUDO_REGISTER
12910		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
12911		{
12912		  unsigned int endregno
12913		    = regno + HARD_REGNO_NREGS (regno,
12914						GET_MODE (XEXP (note, 0)));
12915		  int all_used = 1;
12916		  unsigned int i;
12917
12918		  for (i = regno; i < endregno; i++)
12919		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12920			 && ! find_regno_fusage (place, USE, i))
12921			|| dead_or_set_regno_p (place, i))
12922		      all_used = 0;
12923
12924		  if (! all_used)
12925		    {
12926		      /* Put only REG_DEAD notes for pieces that are
12927			 not already dead or set.  */
12928
12929		      for (i = regno; i < endregno;
12930			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
12931			{
12932			  rtx piece = regno_reg_rtx[i];
12933			  basic_block bb = this_basic_block;
12934
12935			  if (! dead_or_set_p (place, piece)
12936			      && ! reg_bitfield_target_p (piece,
12937							  PATTERN (place)))
12938			    {
12939			      rtx new_note
12940				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12941
12942			      distribute_notes (new_note, place, place,
12943						NULL_RTX);
12944			    }
12945			  else if (! refers_to_regno_p (i, i + 1,
12946							PATTERN (place), 0)
12947				   && ! find_regno_fusage (place, USE, i))
12948			    for (tem = PREV_INSN (place); ;
12949				 tem = PREV_INSN (tem))
12950			      {
12951				if (! INSN_P (tem))
12952				  {
12953				    if (tem == BB_HEAD (bb))
12954				      {
12955					SET_BIT (refresh_blocks,
12956						 this_basic_block->index);
12957					break;
12958				      }
12959				    continue;
12960				  }
12961				if (dead_or_set_p (tem, piece)
12962				    || reg_bitfield_target_p (piece,
12963							      PATTERN (tem)))
12964				  {
12965				    REG_NOTES (tem)
12966				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12967							   REG_NOTES (tem));
12968				    break;
12969				  }
12970			      }
12971
12972			}
12973
12974		      place = 0;
12975		    }
12976		}
12977	    }
12978	  break;
12979
12980	default:
12981	  /* Any other notes should not be present at this point in the
12982	     compilation.  */
12983	  abort ();
12984	}
12985
12986      if (place)
12987	{
12988	  XEXP (note, 1) = REG_NOTES (place);
12989	  REG_NOTES (place) = note;
12990	}
12991      else if ((REG_NOTE_KIND (note) == REG_DEAD
12992		|| REG_NOTE_KIND (note) == REG_UNUSED)
12993	       && GET_CODE (XEXP (note, 0)) == REG)
12994	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12995
12996      if (place2)
12997	{
12998	  if ((REG_NOTE_KIND (note) == REG_DEAD
12999	       || REG_NOTE_KIND (note) == REG_UNUSED)
13000	      && GET_CODE (XEXP (note, 0)) == REG)
13001	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
13002
13003	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
13004					       REG_NOTE_KIND (note),
13005					       XEXP (note, 0),
13006					       REG_NOTES (place2));
13007	}
13008    }
13009}
13010
13011/* Similarly to above, distribute the LOG_LINKS that used to be present on
13012   I3, I2, and I1 to new locations.  This is also called to add a link
13013   pointing at I3 when I3's destination is changed.  */
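
/* For example (hypothetical): if a link on I3 pointed at an insn I1
   that sets reg 61, and after combination I3 no longer uses reg 61,
   the link is re-attached to the first insn after I1 that does use
   reg 61 (provided reg 61 is not set again first).  */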
13014
13015static void
13016distribute_links (rtx links)
13017{
13018  rtx link, next_link;
13019
13020  for (link = links; link; link = next_link)
13021    {
13022      rtx place = 0;
13023      rtx insn;
13024      rtx set, reg;
13025
13026      next_link = XEXP (link, 1);
13027
13028      /* If the insn that this link points to is a NOTE or isn't a single
13029	 set, ignore it.  In the latter case, it isn't clear what we
13030	 can do other than ignore the link, since we can't tell which
13031	 register it was for.  Such links wouldn't be used by combine
13032	 anyway.
13033
13034	 It is not possible for the destination of the target of the link to
13035	 have been changed by combine.  The only potential for this is if we
13036	 replace I3, I2, and I1 by I3 and I2.  But in that case the
13037	 destination of I2 also remains unchanged.  */
13038
13039      if (GET_CODE (XEXP (link, 0)) == NOTE
13040	  || (set = single_set (XEXP (link, 0))) == 0)
13041	continue;
13042
13043      reg = SET_DEST (set);
13044      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
13045	     || GET_CODE (reg) == SIGN_EXTRACT
13046	     || GET_CODE (reg) == STRICT_LOW_PART)
13047	reg = XEXP (reg, 0);
13048
13049      /* A LOG_LINK is defined as being placed on the first insn that uses
13050	 a register and points to the insn that sets the register.  Start
13051	 searching at the next insn after the target of the link and stop
13052	 when we reach a set of the register or the end of the basic block.
13053
13054	 Note that this correctly handles the link that used to point from
13055	 I3 to I2.  Also note that not much searching is typically done here
13056	 since most links don't point very far away.  */
13057
13058      for (insn = NEXT_INSN (XEXP (link, 0));
13059	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
13060		     || BB_HEAD (this_basic_block->next_bb) != insn));
13061	   insn = NEXT_INSN (insn))
13062	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
13063	  {
13064	    if (reg_referenced_p (reg, PATTERN (insn)))
13065	      place = insn;
13066	    break;
13067	  }
13068	else if (GET_CODE (insn) == CALL_INSN
13069		 && find_reg_fusage (insn, USE, reg))
13070	  {
13071	    place = insn;
13072	    break;
13073	  }
13074	else if (INSN_P (insn) && reg_set_p (reg, insn))
13075	  break;
13076
13077      /* If we found a place to put the link, place it there unless there
13078	 is already a link to the same insn as LINK at that point.  */
13079
13080      if (place)
13081	{
13082	  rtx link2;
13083
13084	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
13085	    if (XEXP (link2, 0) == XEXP (link, 0))
13086	      break;
13087
13088	  if (link2 == 0)
13089	    {
13090	      XEXP (link, 1) = LOG_LINKS (place);
13091	      LOG_LINKS (place) = link;
13092
13093	      /* Set added_links_insn to the earliest insn we added a
13094		 link to.  */
13095	      if (added_links_insn == 0
13096		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
13097		added_links_insn = place;
13098	    }
13099	}
13100    }
13101}
13102
13103/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
13104
13105static int
13106insn_cuid (rtx insn)
13107{
13108  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
13109	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
13110    insn = NEXT_INSN (insn);
13111
13112  if (INSN_UID (insn) > max_uid_cuid)
13113    abort ();
13114
13115  return INSN_CUID (insn);
13116}
13117
13118void
13119dump_combine_stats (FILE *file)
13120{
13121  fnotice
13122    (file,
13123     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13124     combine_attempts, combine_merges, combine_extras, combine_successes);
13125}
13126
13127void
13128dump_combine_total_stats (FILE *file)
13129{
13130  fnotice
13131    (file,
13132     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13133     total_attempts, total_merges, total_extras, total_successes);
13134}
13135