combine.c revision 102780
1/* Optimize by combining instructions for GNU compiler.
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23   Portable Optimizer, but redone to work on our list-structured
24   representation for RTL instead of their string representation.
25
26   The LOG_LINKS of each insn identify the most recent assignment
27   to each REG used in the insn.  It is a list of previous insns,
28   each of which contains a SET for a REG that is used in this insn
29   and not used or set in between.  LOG_LINKs never cross basic blocks.
30   They were set up by the preceding pass (lifetime analysis).
31
32   We try to combine each pair of insns joined by a logical link.
33   We also try to combine triples of insns A, B and C when
34   C has a link back to B and B has a link back to A.
35
36   LOG_LINKS does not have links for use of the CC0.  They don't
37   need to, because the insn that sets the CC0 is always immediately
38   before the insn that tests it.  So we always regard a branch
39   insn as having a logical link to the preceding insn.  The same is true
40   for an insn explicitly using CC0.
41
42   We check (with use_crosses_set_p) to avoid combining in such a way
43   as to move a computation to a place where its value would be different.
44
45   Combination is done by mathematically substituting the previous
46   insn(s) values for the regs they set into the expressions in
47   the later insns that refer to these regs.  If the result is a valid insn
48   for our target machine, according to the machine description,
49   we install it, delete the earlier insns, and update the data flow
50   information (LOG_LINKS and REG_NOTES) for what we did.
51
52   There are a few exceptions where the dataflow information created by
53   flow.c aren't completely updated:
54
55   - reg_live_length is not updated
56   - reg_n_refs is not adjusted in the rare case when a register is
57     no longer required in a computation
58   - there are extremely rare cases (see distribute_regnotes) when a
59     REG_DEAD note is lost
60   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61     removed because there is no way to know which register it was
62     linking
63
64   To simplify substitution, we combine only when the earlier insn(s)
65   consist of only a single assignment.  To simplify updating afterward,
66   we never combine when a subroutine call appears in the middle.
67
68   Since we do not represent assignments to CC0 explicitly except when that
69   is all an insn does, there is no LOG_LINKS entry in an insn that uses
70   the condition code for the insn that set the condition code.
71   Fortunately, these two insns must be consecutive.
72   Therefore, every JUMP_INSN is taken to have an implicit logical link
73   to the preceding insn.  This is not quite right, since non-jumps can
74   also use the condition code; but in practice such insns would not
75   combine anyway.  */
76
77#include "config.h"
78#include "system.h"
79#include "rtl.h"
80#include "tm_p.h"
81#include "flags.h"
82#include "regs.h"
83#include "hard-reg-set.h"
84#include "basic-block.h"
85#include "insn-config.h"
86#include "function.h"
87/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
88#include "expr.h"
89#include "insn-attr.h"
90#include "recog.h"
91#include "real.h"
92#include "toplev.h"
93
94/* It is not safe to use ordinary gen_lowpart in combine.
95   Use gen_lowpart_for_combine instead.  See comments there.  */
96#define gen_lowpart dont_use_gen_lowpart_you_dummy
97
98/* Number of attempts to combine instructions in this function.  */
99
100static int combine_attempts;
101
102/* Number of attempts that got as far as substitution in this function.  */
103
104static int combine_merges;
105
106/* Number of instructions combined with added SETs in this function.  */
107
108static int combine_extras;
109
110/* Number of instructions combined in this function.  */
111
112static int combine_successes;
113
114/* Totals over entire compilation.  */
115
116static int total_attempts, total_merges, total_extras, total_successes;
117
118
119/* Vector mapping INSN_UIDs to cuids.
120   The cuids are like uids but increase monotonically always.
121   Combine always uses cuids so that it can compare them.
122   But actually renumbering the uids, which we used to do,
123   proves to be a bad idea because it makes it hard to compare
124   the dumps produced by earlier passes with those from later passes.  */
125
126static int *uid_cuid;
127static int max_uid_cuid;
128
129/* Get the cuid of an insn.  */
130
131#define INSN_CUID(INSN) \
132(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
133
134/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
135   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */
136
137#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
138  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
139
140/* Maximum register number, which is the size of the tables below.  */
141
142static unsigned int combine_max_regno;
143
144/* Record last point of death of (hard or pseudo) register n.  */
145
146static rtx *reg_last_death;
147
148/* Record last point of modification of (hard or pseudo) register n.  */
149
150static rtx *reg_last_set;
151
152/* Record the cuid of the last insn that invalidated memory
153   (anything that writes memory, and subroutine calls, but not pushes).  */
154
155static int mem_last_set;
156
157/* Record the cuid of the last CALL_INSN
158   so we can tell whether a potential combination crosses any calls.  */
159
160static int last_call_cuid;
161
162/* When `subst' is called, this is the insn that is being modified
163   (by combining in a previous insn).  The PATTERN of this insn
164   is still the old pattern partially modified and it should not be
165   looked at, but this may be used to examine the successors of the insn
166   to judge whether a simplification is valid.  */
167
168static rtx subst_insn;
169
170/* This is an insn that belongs before subst_insn, but is not currently
171   on the insn chain.  */
172
173static rtx subst_prev_insn;
174
175/* This is the lowest CUID that `subst' is currently dealing with.
176   get_last_value will not return a value if the register was set at or
177   after this CUID.  If not for this mechanism, we could get confused if
178   I2 or I1 in try_combine were an insn that used the old value of a register
179   to obtain a new value.  In that case, we might erroneously get the
180   new value of the register when we wanted the old one.  */
181
182static int subst_low_cuid;
183
184/* This contains any hard registers that are used in newpat; reg_dead_at_p
185   must consider all these registers to be always live.  */
186
187static HARD_REG_SET newpat_used_regs;
188
189/* This is an insn to which a LOG_LINKS entry has been added.  If this
190   insn is the earlier than I2 or I3, combine should rescan starting at
191   that location.  */
192
193static rtx added_links_insn;
194
195/* Basic block number of the block in which we are performing combines.  */
196static int this_basic_block;
197
198/* A bitmap indicating which blocks had registers go dead at entry.
199   After combine, we'll need to re-do global life analysis with
200   those blocks as starting points.  */
201static sbitmap refresh_blocks;
202static int need_refresh;
203
204/* The next group of arrays allows the recording of the last value assigned
205   to (hard or pseudo) register n.  We use this information to see if a
206   operation being processed is redundant given a prior operation performed
207   on the register.  For example, an `and' with a constant is redundant if
208   all the zero bits are already known to be turned off.
209
210   We use an approach similar to that used by cse, but change it in the
211   following ways:
212
213   (1) We do not want to reinitialize at each label.
214   (2) It is useful, but not critical, to know the actual value assigned
215       to a register.  Often just its form is helpful.
216
217   Therefore, we maintain the following arrays:
218
219   reg_last_set_value		the last value assigned
220   reg_last_set_label		records the value of label_tick when the
221				register was assigned
222   reg_last_set_table_tick	records the value of label_tick when a
223				value using the register is assigned
224   reg_last_set_invalid		set to non-zero when it is not valid
225				to use the value of this register in some
226				register's value
227
228   To understand the usage of these tables, it is important to understand
229   the distinction between the value in reg_last_set_value being valid
230   and the register being validly contained in some other expression in the
231   table.
232
233   Entry I in reg_last_set_value is valid if it is non-zero, and either
234   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235
236   Register I may validly appear in any expression returned for the value
237   of another register if reg_n_sets[i] is 1.  It may also appear in the
238   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239   reg_last_set_invalid[j] is zero.
240
241   If an expression is found in the table containing a register which may
242   not validly appear in an expression, the register is replaced by
243   something that won't match, (clobber (const_int 0)).
244
245   reg_last_set_invalid[i] is set non-zero when register I is being assigned
246   to and reg_last_set_table_tick[i] == label_tick.  */
247
248/* Record last value assigned to (hard or pseudo) register n.  */
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253   reg_last_set_value[n].  */
254
255static int *reg_last_set_label;
256
257/* Record the value of label_tick when an expression involving register n
258   is placed in reg_last_set_value.  */
259
260static int *reg_last_set_table_tick;
261
262/* Set non-zero if references to register n in expressions should not be
263   used.  */
264
265static char *reg_last_set_invalid;
266
267/* Incremented for each label.  */
268
269static int label_tick;
270
271/* Some registers that are set more than once and used in more than one
272   basic block are nevertheless always set in similar ways.  For example,
273   a QImode register may be loaded from memory in two places on a machine
274   where byte loads zero extend.
275
276   We record in the following array what we know about the nonzero
277   bits of a register, specifically which bits are known to be zero.
278
279   If an entry is zero, it means that we don't know anything special.  */
280
281static unsigned HOST_WIDE_INT *reg_nonzero_bits;
282
283/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
284   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */
285
286static enum machine_mode nonzero_bits_mode;
287
288/* Nonzero if we know that a register has some leading bits that are always
289   equal to the sign bit.  */
290
291static unsigned char *reg_sign_bit_copies;
292
293/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
294   It is zero while computing them and after combine has completed.  This
295   former test prevents propagating values based on previously set values,
296   which can be incorrect if a variable is modified in a loop.  */
297
298static int nonzero_sign_valid;
299
300/* These arrays are maintained in parallel with reg_last_set_value
301   and are used to store the mode in which the register was last set,
302   the bits that were known to be zero when it was last set, and the
303   number of sign bits copies it was known to have when it was last set.  */
304
305static enum machine_mode *reg_last_set_mode;
306static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307static char *reg_last_set_sign_bit_copies;
308
309/* Record one modification to rtl structure
310   to be undone by storing old_contents into *where.
311   is_int is 1 if the contents are an int.  */
312
313struct undo
314{
315  struct undo *next;
316  int is_int;
317  union {rtx r; unsigned int i;} old_contents;
318  union {rtx *r; unsigned int *i;} where;
319};
320
321/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
322   num_undo says how many are currently recorded.
323
324   other_insn is nonzero if we have modified some other insn in the process
325   of working on subst_insn.  It must be verified too.  */
326
327struct undobuf
328{
329  struct undo *undos;
330  struct undo *frees;
331  rtx other_insn;
332};
333
334static struct undobuf undobuf;
335
336/* Number of times the pseudo being substituted for
337   was found and replaced.  */
338
339static int n_occurrences;
340
341static void do_SUBST			PARAMS ((rtx *, rtx));
342static void do_SUBST_INT		PARAMS ((unsigned int *,
343						 unsigned int));
344static void init_reg_last_arrays	PARAMS ((void));
345static void setup_incoming_promotions   PARAMS ((void));
346static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
347static int cant_combine_insn_p	PARAMS ((rtx));
348static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
349static int sets_function_arg_p	PARAMS ((rtx));
350static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
351static int contains_muldiv	PARAMS ((rtx));
352static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
353static void undo_all		PARAMS ((void));
354static void undo_commit		PARAMS ((void));
355static rtx *find_split_point	PARAMS ((rtx *, rtx));
356static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
357static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
358static rtx simplify_if_then_else  PARAMS ((rtx));
359static rtx simplify_set		PARAMS ((rtx));
360static rtx simplify_logical	PARAMS ((rtx, int));
361static rtx expand_compound_operation  PARAMS ((rtx));
362static rtx expand_field_assignment  PARAMS ((rtx));
363static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
364					 rtx, unsigned HOST_WIDE_INT, int,
365					 int, int));
366static rtx extract_left_shift	PARAMS ((rtx, int));
367static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
368static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
369					 unsigned HOST_WIDE_INT *));
370static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
371					 unsigned HOST_WIDE_INT, rtx, int));
372static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
373static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
374static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
375static rtx make_field_assignment  PARAMS ((rtx));
376static rtx apply_distributive_law  PARAMS ((rtx));
377static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
378					    unsigned HOST_WIDE_INT));
379static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
380static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
381static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
382					 enum rtx_code, HOST_WIDE_INT,
383					 enum machine_mode, int *));
384static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
385					 rtx, int));
386static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
387static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
388static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
389					 rtx, rtx));
390static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
391static void update_table_tick	PARAMS ((rtx));
392static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
393static void check_promoted_subreg PARAMS ((rtx, rtx));
394static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
395static void record_dead_and_set_regs  PARAMS ((rtx));
396static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
397static rtx get_last_value	PARAMS ((rtx));
398static int use_crosses_set_p	PARAMS ((rtx, int));
399static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
400static int reg_dead_at_p	PARAMS ((rtx, rtx));
401static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
402static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
403static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
404static void distribute_links	PARAMS ((rtx));
405static void mark_used_regs_combine PARAMS ((rtx));
406static int insn_cuid		PARAMS ((rtx));
407static void record_promoted_value PARAMS ((rtx, rtx));
408static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
409static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
410
411/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
412   insn.  The substitution can be undone by undo_all.  If INTO is already
413   set to NEWVAL, do not record this change.  Because computing NEWVAL might
414   also call SUBST, we have to compute it before we put anything into
415   the undo table.  */
416
417static void
418do_SUBST (into, newval)
419     rtx *into, newval;
420{
421  struct undo *buf;
422  rtx oldval = *into;
423
424  if (oldval == newval)
425    return;
426
427  /* We'd like to catch as many invalid transformations here as
428     possible.  Unfortunately, there are way too many mode changes
429     that are perfectly valid, so we'd waste too much effort for
430     little gain doing the checks here.  Focus on catching invalid
431     transformations involving integer constants.  */
432  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
433      && GET_CODE (newval) == CONST_INT)
434    {
435      /* Sanity check that we're replacing oldval with a CONST_INT
436	 that is a valid sign-extension for the original mode.  */
437      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
438						 GET_MODE (oldval)))
439	abort ();
440
441      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
442	 CONST_INT is not valid, because after the replacement, the
443	 original mode would be gone.  Unfortunately, we can't tell
444	 when do_SUBST is called to replace the operand thereof, so we
445	 perform this test on oldval instead, checking whether an
446	 invalid replacement took place before we got here.  */
447      if ((GET_CODE (oldval) == SUBREG
448	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
449	  || (GET_CODE (oldval) == ZERO_EXTEND
450	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
451	abort ();
452     }
453
454  if (undobuf.frees)
455    buf = undobuf.frees, undobuf.frees = buf->next;
456  else
457    buf = (struct undo *) xmalloc (sizeof (struct undo));
458
459  buf->is_int = 0;
460  buf->where.r = into;
461  buf->old_contents.r = oldval;
462  *into = newval;
463
464  buf->next = undobuf.undos, undobuf.undos = buf;
465}
466
467#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
468
469/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
470   for the value of a HOST_WIDE_INT value (including CONST_INT) is
471   not safe.  */
472
473static void
474do_SUBST_INT (into, newval)
475     unsigned int *into, newval;
476{
477  struct undo *buf;
478  unsigned int oldval = *into;
479
480  if (oldval == newval)
481    return;
482
483  if (undobuf.frees)
484    buf = undobuf.frees, undobuf.frees = buf->next;
485  else
486    buf = (struct undo *) xmalloc (sizeof (struct undo));
487
488  buf->is_int = 1;
489  buf->where.i = into;
490  buf->old_contents.i = oldval;
491  *into = newval;
492
493  buf->next = undobuf.undos, undobuf.undos = buf;
494}
495
496#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))
497
498/* Main entry point for combiner.  F is the first insn of the function.
499   NREGS is the first unused pseudo-reg number.
500
501   Return non-zero if the combiner has turned an indirect jump
502   instruction into a direct jump.  */
503int
504combine_instructions (f, nregs)
505     rtx f;
506     unsigned int nregs;
507{
508  rtx insn, next;
509#ifdef HAVE_cc0
510  rtx prev;
511#endif
512  int i;
513  rtx links, nextlinks;
514
515  int new_direct_jump_p = 0;
516
517  combine_attempts = 0;
518  combine_merges = 0;
519  combine_extras = 0;
520  combine_successes = 0;
521
522  combine_max_regno = nregs;
523
524  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
525		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
526  reg_sign_bit_copies
527    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));
528
529  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
530  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
531  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
532  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
533  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
534  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
535  reg_last_set_mode
536    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
537  reg_last_set_nonzero_bits
538    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
539  reg_last_set_sign_bit_copies
540    = (char *) xmalloc (nregs * sizeof (char));
541
542  init_reg_last_arrays ();
543
544  init_recog_no_volatile ();
545
546  /* Compute maximum uid value so uid_cuid can be allocated.  */
547
548  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
549    if (INSN_UID (insn) > i)
550      i = INSN_UID (insn);
551
552  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
553  max_uid_cuid = i;
554
555  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
556
557  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
558     when, for example, we have j <<= 1 in a loop.  */
559
560  nonzero_sign_valid = 0;
561
562  /* Compute the mapping from uids to cuids.
563     Cuids are numbers assigned to insns, like uids,
564     except that cuids increase monotonically through the code.
565
566     Scan all SETs and see if we can deduce anything about what
567     bits are known to be zero for some registers and how many copies
568     of the sign bit are known to exist for those registers.
569
570     Also set any known values so that we can use it while searching
571     for what bits are known to be set.  */
572
573  label_tick = 1;
574
575  /* We need to initialize it here, because record_dead_and_set_regs may call
576     get_last_value.  */
577  subst_prev_insn = NULL_RTX;
578
579  setup_incoming_promotions ();
580
581  refresh_blocks = sbitmap_alloc (n_basic_blocks);
582  sbitmap_zero (refresh_blocks);
583  need_refresh = 0;
584
585  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
586    {
587      uid_cuid[INSN_UID (insn)] = ++i;
588      subst_low_cuid = i;
589      subst_insn = insn;
590
591      if (INSN_P (insn))
592	{
593	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
594		       NULL);
595	  record_dead_and_set_regs (insn);
596
597#ifdef AUTO_INC_DEC
598	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
599	    if (REG_NOTE_KIND (links) == REG_INC)
600	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
601						NULL);
602#endif
603	}
604
605      if (GET_CODE (insn) == CODE_LABEL)
606	label_tick++;
607    }
608
609  nonzero_sign_valid = 1;
610
611  /* Now scan all the insns in forward order.  */
612
613  this_basic_block = -1;
614  label_tick = 1;
615  last_call_cuid = 0;
616  mem_last_set = 0;
617  init_reg_last_arrays ();
618  setup_incoming_promotions ();
619
620  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
621    {
622      next = 0;
623
624      /* If INSN starts a new basic block, update our basic block number.  */
625      if (this_basic_block + 1 < n_basic_blocks
626	  && BLOCK_HEAD (this_basic_block + 1) == insn)
627	this_basic_block++;
628
629      if (GET_CODE (insn) == CODE_LABEL)
630	label_tick++;
631
632      else if (INSN_P (insn))
633	{
634	  /* See if we know about function return values before this
635	     insn based upon SUBREG flags.  */
636	  check_promoted_subreg (insn, PATTERN (insn));
637
638	  /* Try this insn with each insn it links back to.  */
639
640	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
641	    if ((next = try_combine (insn, XEXP (links, 0),
642				     NULL_RTX, &new_direct_jump_p)) != 0)
643	      goto retry;
644
645	  /* Try each sequence of three linked insns ending with this one.  */
646
647	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
648	    {
649	      rtx link = XEXP (links, 0);
650
651	      /* If the linked insn has been replaced by a note, then there
652		 is no point in pursuing this chain any further.  */
653	      if (GET_CODE (link) == NOTE)
654		continue;
655
656	      for (nextlinks = LOG_LINKS (link);
657		   nextlinks;
658		   nextlinks = XEXP (nextlinks, 1))
659		if ((next = try_combine (insn, link,
660					 XEXP (nextlinks, 0),
661					 &new_direct_jump_p)) != 0)
662		  goto retry;
663	    }
664
665#ifdef HAVE_cc0
666	  /* Try to combine a jump insn that uses CC0
667	     with a preceding insn that sets CC0, and maybe with its
668	     logical predecessor as well.
669	     This is how we make decrement-and-branch insns.
670	     We need this special code because data flow connections
671	     via CC0 do not get entered in LOG_LINKS.  */
672
673	  if (GET_CODE (insn) == JUMP_INSN
674	      && (prev = prev_nonnote_insn (insn)) != 0
675	      && GET_CODE (prev) == INSN
676	      && sets_cc0_p (PATTERN (prev)))
677	    {
678	      if ((next = try_combine (insn, prev,
679				       NULL_RTX, &new_direct_jump_p)) != 0)
680		goto retry;
681
682	      for (nextlinks = LOG_LINKS (prev); nextlinks;
683		   nextlinks = XEXP (nextlinks, 1))
684		if ((next = try_combine (insn, prev,
685					 XEXP (nextlinks, 0),
686					 &new_direct_jump_p)) != 0)
687		  goto retry;
688	    }
689
690	  /* Do the same for an insn that explicitly references CC0.  */
691	  if (GET_CODE (insn) == INSN
692	      && (prev = prev_nonnote_insn (insn)) != 0
693	      && GET_CODE (prev) == INSN
694	      && sets_cc0_p (PATTERN (prev))
695	      && GET_CODE (PATTERN (insn)) == SET
696	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
697	    {
698	      if ((next = try_combine (insn, prev,
699				       NULL_RTX, &new_direct_jump_p)) != 0)
700		goto retry;
701
702	      for (nextlinks = LOG_LINKS (prev); nextlinks;
703		   nextlinks = XEXP (nextlinks, 1))
704		if ((next = try_combine (insn, prev,
705					 XEXP (nextlinks, 0),
706					 &new_direct_jump_p)) != 0)
707		  goto retry;
708	    }
709
710	  /* Finally, see if any of the insns that this insn links to
711	     explicitly references CC0.  If so, try this insn, that insn,
712	     and its predecessor if it sets CC0.  */
713	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
714	    if (GET_CODE (XEXP (links, 0)) == INSN
715		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
716		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
717		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
718		&& GET_CODE (prev) == INSN
719		&& sets_cc0_p (PATTERN (prev))
720		&& (next = try_combine (insn, XEXP (links, 0),
721					prev, &new_direct_jump_p)) != 0)
722	      goto retry;
723#endif
724
725	  /* Try combining an insn with two different insns whose results it
726	     uses.  */
727	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
728	    for (nextlinks = XEXP (links, 1); nextlinks;
729		 nextlinks = XEXP (nextlinks, 1))
730	      if ((next = try_combine (insn, XEXP (links, 0),
731				       XEXP (nextlinks, 0),
732				       &new_direct_jump_p)) != 0)
733		goto retry;
734
735	  if (GET_CODE (insn) != NOTE)
736	    record_dead_and_set_regs (insn);
737
738	retry:
739	  ;
740	}
741    }
742
743  delete_noop_moves (f);
744
745  if (need_refresh)
746    {
747      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
748			PROP_DEATH_NOTES);
749    }
750
751  /* Clean up.  */
752  sbitmap_free (refresh_blocks);
753  free (reg_nonzero_bits);
754  free (reg_sign_bit_copies);
755  free (reg_last_death);
756  free (reg_last_set);
757  free (reg_last_set_value);
758  free (reg_last_set_table_tick);
759  free (reg_last_set_label);
760  free (reg_last_set_invalid);
761  free (reg_last_set_mode);
762  free (reg_last_set_nonzero_bits);
763  free (reg_last_set_sign_bit_copies);
764  free (uid_cuid);
765
766  {
767    struct undo *undo, *next;
768    for (undo = undobuf.frees; undo; undo = next)
769      {
770	next = undo->next;
771	free (undo);
772      }
773    undobuf.frees = 0;
774  }
775
776  total_attempts += combine_attempts;
777  total_merges += combine_merges;
778  total_extras += combine_extras;
779  total_successes += combine_successes;
780
781  nonzero_sign_valid = 0;
782
783  /* Make recognizer allow volatile MEMs again.  */
784  init_recog ();
785
786  return new_direct_jump_p;
787}
788
789/* Wipe the reg_last_xxx arrays in preparation for another pass.  */
790
791static void
792init_reg_last_arrays ()
793{
794  unsigned int nregs = combine_max_regno;
795
796  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
797  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
798  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
799  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
800  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
801  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
802  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
803  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
804  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
805}
806
807/* Set up any promoted values for incoming argument registers.  */
808
809static void
810setup_incoming_promotions ()
811{
812#ifdef PROMOTE_FUNCTION_ARGS
813  unsigned int regno;
814  rtx reg;
815  enum machine_mode mode;
816  int unsignedp;
817  rtx first = get_insns ();
818
819#ifndef OUTGOING_REGNO
820#define OUTGOING_REGNO(N) N
821#endif
822  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
823    /* Check whether this register can hold an incoming pointer
824       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
825       numbers, so translate if necessary due to register windows.  */
826    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
827	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
828      {
829	record_value_for_reg
830	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
831				       : SIGN_EXTEND),
832				      GET_MODE (reg),
833				      gen_rtx_CLOBBER (mode, const0_rtx)));
834      }
835#endif
836}
837
838/* Called via note_stores.  If X is a pseudo that is narrower than
839   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
840
841   If we are setting only a portion of X and we can't figure out what
842   portion, assume all bits will be used since we don't know what will
843   be happening.
844
845   Similarly, set how many bits of X are known to be copies of the sign bit
846   at all locations in the function.  This is the smallest number implied
847   by any set of X.  */
848
849static void
850set_nonzero_bits_and_sign_copies (x, set, data)
851     rtx x;
852     rtx set;
853     void *data ATTRIBUTE_UNUSED;
854{
855  unsigned int num;
856
857  if (GET_CODE (x) == REG
858      && REGNO (x) >= FIRST_PSEUDO_REGISTER
859      /* If this register is undefined at the start of the file, we can't
860	 say what its contents were.  */
861      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
862      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
863    {
864      if (set == 0 || GET_CODE (set) == CLOBBER)
865	{
866	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
867	  reg_sign_bit_copies[REGNO (x)] = 1;
868	  return;
869	}
870
871      /* If this is a complex assignment, see if we can convert it into a
872	 simple assignment.  */
873      set = expand_field_assignment (set);
874
875      /* If this is a simple assignment, or we have a paradoxical SUBREG,
876	 set what we know about X.  */
877
878      if (SET_DEST (set) == x
879	  || (GET_CODE (SET_DEST (set)) == SUBREG
880	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
881		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
882	      && SUBREG_REG (SET_DEST (set)) == x))
883	{
884	  rtx src = SET_SRC (set);
885
886#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
887	  /* If X is narrower than a word and SRC is a non-negative
888	     constant that would appear negative in the mode of X,
889	     sign-extend it for use in reg_nonzero_bits because some
890	     machines (maybe most) will actually do the sign-extension
891	     and this is the conservative approach.
892
893	     ??? For 2.5, try to tighten up the MD files in this regard
894	     instead of this kludge.  */
895
896	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
897	      && GET_CODE (src) == CONST_INT
898	      && INTVAL (src) > 0
899	      && 0 != (INTVAL (src)
900		       & ((HOST_WIDE_INT) 1
901			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
902	    src = GEN_INT (INTVAL (src)
903			   | ((HOST_WIDE_INT) (-1)
904			      << GET_MODE_BITSIZE (GET_MODE (x))));
905#endif
906
907	  /* Don't call nonzero_bits if it cannot change anything.  */
908	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
909	    reg_nonzero_bits[REGNO (x)]
910	      |= nonzero_bits (src, nonzero_bits_mode);
911	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
912	  if (reg_sign_bit_copies[REGNO (x)] == 0
913	      || reg_sign_bit_copies[REGNO (x)] > num)
914	    reg_sign_bit_copies[REGNO (x)] = num;
915	}
916      else
917	{
918	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
919	  reg_sign_bit_copies[REGNO (x)] = 1;
920	}
921    }
922}
923
924/* See if INSN can be combined into I3.  PRED and SUCC are optionally
925   insns that were previously combined into I3 or that will be combined
926   into the merger of INSN and I3.
927
928   Return 0 if the combination is not allowed for any reason.
929
930   If the combination is allowed, *PDEST will be set to the single
931   destination of INSN and *PSRC to the single source, and this function
932   will return 1.  */
933
934static int
935can_combine_p (insn, i3, pred, succ, pdest, psrc)
936     rtx insn;
937     rtx i3;
938     rtx pred ATTRIBUTE_UNUSED;
939     rtx succ;
940     rtx *pdest, *psrc;
941{
942  int i;
943  rtx set = 0, src, dest;
944  rtx p;
945#ifdef AUTO_INC_DEC
946  rtx link;
947#endif
948  int all_adjacent = (succ ? (next_active_insn (insn) == succ
949			      && next_active_insn (succ) == i3)
950		      : next_active_insn (insn) == i3);
951
952  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
953     or a PARALLEL consisting of such a SET and CLOBBERs.
954
955     If INSN has CLOBBER parallel parts, ignore them for our processing.
956     By definition, these happen during the execution of the insn.  When it
957     is merged with another insn, all bets are off.  If they are, in fact,
958     needed and aren't also supplied in I3, they may be added by
959     recog_for_combine.  Otherwise, it won't match.
960
961     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
962     note.
963
964     Get the source and destination of INSN.  If more than one, can't
965     combine.  */
966
967  if (GET_CODE (PATTERN (insn)) == SET)
968    set = PATTERN (insn);
969  else if (GET_CODE (PATTERN (insn)) == PARALLEL
970	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
971    {
972      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
973	{
974	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
975
976	  switch (GET_CODE (elt))
977	    {
978	    /* This is important to combine floating point insns
979	       for the SH4 port.  */
980	    case USE:
981	      /* Combining an isolated USE doesn't make sense.
982		 We depend here on combinable_i3pat to reject them.  */
983	      /* The code below this loop only verifies that the inputs of
984		 the SET in INSN do not change.  We call reg_set_between_p
985		 to verify that the REG in the USE does not change between
986		 I3 and INSN.
987		 If the USE in INSN was for a pseudo register, the matching
988		 insn pattern will likely match any register; combining this
989		 with any other USE would only be safe if we knew that the
990		 used registers have identical values, or if there was
991		 something to tell them apart, e.g. different modes.  For
992		 now, we forgo such complicated tests and simply disallow
993		 combining of USES of pseudo registers with any other USE.  */
994	      if (GET_CODE (XEXP (elt, 0)) == REG
995		  && GET_CODE (PATTERN (i3)) == PARALLEL)
996		{
997		  rtx i3pat = PATTERN (i3);
998		  int i = XVECLEN (i3pat, 0) - 1;
999		  unsigned int regno = REGNO (XEXP (elt, 0));
1000
1001		  do
1002		    {
1003		      rtx i3elt = XVECEXP (i3pat, 0, i);
1004
1005		      if (GET_CODE (i3elt) == USE
1006			  && GET_CODE (XEXP (i3elt, 0)) == REG
1007			  && (REGNO (XEXP (i3elt, 0)) == regno
1008			      ? reg_set_between_p (XEXP (elt, 0),
1009						   PREV_INSN (insn), i3)
1010			      : regno >= FIRST_PSEUDO_REGISTER))
1011			return 0;
1012		    }
1013		  while (--i >= 0);
1014		}
1015	      break;
1016
1017	      /* We can ignore CLOBBERs.  */
1018	    case CLOBBER:
1019	      break;
1020
1021	    case SET:
1022	      /* Ignore SETs whose result isn't used but not those that
1023		 have side-effects.  */
1024	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1025		  && ! side_effects_p (elt))
1026		break;
1027
1028	      /* If we have already found a SET, this is a second one and
1029		 so we cannot combine with this insn.  */
1030	      if (set)
1031		return 0;
1032
1033	      set = elt;
1034	      break;
1035
1036	    default:
1037	      /* Anything else means we can't combine.  */
1038	      return 0;
1039	    }
1040	}
1041
1042      if (set == 0
1043	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1044	     so don't do anything with it.  */
1045	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1046	return 0;
1047    }
1048  else
1049    return 0;
1050
1051  if (set == 0)
1052    return 0;
1053
1054  set = expand_field_assignment (set);
1055  src = SET_SRC (set), dest = SET_DEST (set);
1056
1057  /* Don't eliminate a store in the stack pointer.  */
1058  if (dest == stack_pointer_rtx
1059      /* If we couldn't eliminate a field assignment, we can't combine.  */
1060      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
1061      /* Don't combine with an insn that sets a register to itself if it has
1062	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
1063      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1064      /* Can't merge an ASM_OPERANDS.  */
1065      || GET_CODE (src) == ASM_OPERANDS
1066      /* Can't merge a function call.  */
1067      || GET_CODE (src) == CALL
1068      /* Don't eliminate a function call argument.  */
1069      || (GET_CODE (i3) == CALL_INSN
1070	  && (find_reg_fusage (i3, USE, dest)
1071	      || (GET_CODE (dest) == REG
1072		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1073		  && global_regs[REGNO (dest)])))
1074      /* Don't substitute into an incremented register.  */
1075      || FIND_REG_INC_NOTE (i3, dest)
1076      || (succ && FIND_REG_INC_NOTE (succ, dest))
1077#if 0
1078      /* Don't combine the end of a libcall into anything.  */
1079      /* ??? This gives worse code, and appears to be unnecessary, since no
1080	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
1081	 use REG_RETVAL notes for noconflict blocks, but other code here
1082	 makes sure that those insns don't disappear.  */
1083      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1084#endif
1085      /* Make sure that DEST is not used after SUCC but before I3.  */
1086      || (succ && ! all_adjacent
1087	  && reg_used_between_p (dest, succ, i3))
1088      /* Make sure that the value that is to be substituted for the register
1089	 does not use any registers whose values alter in between.  However,
1090	 If the insns are adjacent, a use can't cross a set even though we
1091	 think it might (this can happen for a sequence of insns each setting
1092	 the same destination; reg_last_set of that register might point to
1093	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
1094	 equivalent to the memory so the substitution is valid even if there
1095	 are intervening stores.  Also, don't move a volatile asm or
1096	 UNSPEC_VOLATILE across any other insns.  */
1097      || (! all_adjacent
1098	  && (((GET_CODE (src) != MEM
1099		|| ! find_reg_note (insn, REG_EQUIV, src))
1100	       && use_crosses_set_p (src, INSN_CUID (insn)))
1101	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1102	      || GET_CODE (src) == UNSPEC_VOLATILE))
1103      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1104	 better register allocation by not doing the combine.  */
1105      || find_reg_note (i3, REG_NO_CONFLICT, dest)
1106      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1107      /* Don't combine across a CALL_INSN, because that would possibly
1108	 change whether the life span of some REGs crosses calls or not,
1109	 and it is a pain to update that information.
1110	 Exception: if source is a constant, moving it later can't hurt.
1111	 Accept that special case, because it helps -fforce-addr a lot.  */
1112      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1113    return 0;
1114
1115  /* DEST must either be a REG or CC0.  */
1116  if (GET_CODE (dest) == REG)
1117    {
1118      /* If register alignment is being enforced for multi-word items in all
1119	 cases except for parameters, it is possible to have a register copy
1120	 insn referencing a hard register that is not allowed to contain the
1121	 mode being copied and which would not be valid as an operand of most
1122	 insns.  Eliminate this problem by not combining with such an insn.
1123
1124	 Also, on some machines we don't want to extend the life of a hard
1125	 register.  */
1126
1127      if (GET_CODE (src) == REG
1128	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1129	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1130	      /* Don't extend the life of a hard register unless it is
1131		 user variable (if we have few registers) or it can't
1132		 fit into the desired register (meaning something special
1133		 is going on).
1134		 Also avoid substituting a return register into I3, because
1135		 reload can't handle a conflict with constraints of other
1136		 inputs.  */
1137	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
1138		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1139	return 0;
1140    }
1141  else if (GET_CODE (dest) != CC0)
1142    return 0;
1143
1144  /* Don't substitute for a register intended as a clobberable operand.
1145     Similarly, don't substitute an expression containing a register that
1146     will be clobbered in I3.  */
1147  if (GET_CODE (PATTERN (i3)) == PARALLEL)
1148    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1149      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
1150	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1151				       src)
1152	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
1153	return 0;
1154
1155  /* If INSN contains anything volatile, or is an `asm' (whether volatile
1156     or not), reject, unless nothing volatile comes between it and I3 */
1157
1158  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1159    {
1160      /* Make sure succ doesn't contain a volatile reference.  */
1161      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1162        return 0;
1163
1164      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1165        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1166	  return 0;
1167    }
1168
1169  /* If INSN is an asm, and DEST is a hard register, reject, since it has
1170     to be an explicit register variable, and was chosen for a reason.  */
1171
1172  if (GET_CODE (src) == ASM_OPERANDS
1173      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1174    return 0;
1175
1176  /* If there are any volatile insns between INSN and I3, reject, because
1177     they might affect machine state.  */
1178
1179  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1180    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1181      return 0;
1182
1183  /* If INSN or I2 contains an autoincrement or autodecrement,
1184     make sure that register is not used between there and I3,
1185     and not already used in I3 either.
1186     Also insist that I3 not be a jump; if it were one
1187     and the incremented register were spilled, we would lose.  */
1188
1189#ifdef AUTO_INC_DEC
1190  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1191    if (REG_NOTE_KIND (link) == REG_INC
1192	&& (GET_CODE (i3) == JUMP_INSN
1193	    || reg_used_between_p (XEXP (link, 0), insn, i3)
1194	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1195      return 0;
1196#endif
1197
1198#ifdef HAVE_cc0
1199  /* Don't combine an insn that follows a CC0-setting insn.
1200     An insn that uses CC0 must not be separated from the one that sets it.
1201     We do, however, allow I2 to follow a CC0-setting insn if that insn
1202     is passed as I1; in that case it will be deleted also.
1203     We also allow combining in this case if all the insns are adjacent
1204     because that would leave the two CC0 insns adjacent as well.
1205     It would be more logical to test whether CC0 occurs inside I1 or I2,
1206     but that would be much slower, and this ought to be equivalent.  */
1207
1208  p = prev_nonnote_insn (insn);
1209  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1210      && ! all_adjacent)
1211    return 0;
1212#endif
1213
1214  /* If we get here, we have passed all the tests and the combination is
1215     to be allowed.  */
1216
1217  *pdest = dest;
1218  *psrc = src;
1219
1220  return 1;
1221}
1222
1223/* Check if PAT is an insn - or a part of it - used to set up an
1224   argument for a function in a hard register.  */
1225
1226static int
1227sets_function_arg_p (pat)
1228     rtx pat;
1229{
1230  int i;
1231  rtx inner_dest;
1232
1233  switch (GET_CODE (pat))
1234    {
1235    case INSN:
1236      return sets_function_arg_p (PATTERN (pat));
1237
1238    case PARALLEL:
1239      for (i = XVECLEN (pat, 0); --i >= 0;)
1240	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1241	  return 1;
1242
1243      break;
1244
1245    case SET:
1246      inner_dest = SET_DEST (pat);
1247      while (GET_CODE (inner_dest) == STRICT_LOW_PART
1248	     || GET_CODE (inner_dest) == SUBREG
1249	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
1250	inner_dest = XEXP (inner_dest, 0);
1251
1252      return (GET_CODE (inner_dest) == REG
1253	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1254	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1255
1256    default:
1257      break;
1258    }
1259
1260  return 0;
1261}
1262
1263/* LOC is the location within I3 that contains its pattern or the component
1264   of a PARALLEL of the pattern.  We validate that it is valid for combining.
1265
1266   One problem is if I3 modifies its output, as opposed to replacing it
1267   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1268   so would produce an insn that is not equivalent to the original insns.
1269
1270   Consider:
1271
1272         (set (reg:DI 101) (reg:DI 100))
1273	 (set (subreg:SI (reg:DI 101) 0) <foo>)
1274
1275   This is NOT equivalent to:
1276
1277         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1278		    (set (reg:DI 101) (reg:DI 100))])
1279
1280   Not only does this modify 100 (in which case it might still be valid
1281   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1282
1283   We can also run into a problem if I2 sets a register that I1
1284   uses and I1 gets directly substituted into I3 (not via I2).  In that
1285   case, we would be getting the wrong value of I2DEST into I3, so we
1286   must reject the combination.  This case occurs when I2 and I1 both
1287   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1288   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1289   of a SET must prevent combination from occurring.
1290
1291   Before doing the above check, we first try to expand a field assignment
1292   into a set of logical operations.
1293
1294   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1295   we place a register that is both set and used within I3.  If more than one
1296   such register is detected, we fail.
1297
1298   Return 1 if the combination is valid, zero otherwise.  */
1299
1300static int
1301combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1302     rtx i3;
1303     rtx *loc;
1304     rtx i2dest;
1305     rtx i1dest;
1306     int i1_not_in_src;
1307     rtx *pi3dest_killed;
1308{
1309  rtx x = *loc;
1310
1311  if (GET_CODE (x) == SET)
1312    {
1313      rtx set = expand_field_assignment (x);
1314      rtx dest = SET_DEST (set);
1315      rtx src = SET_SRC (set);
1316      rtx inner_dest = dest;
1317
1318#if 0
1319      rtx inner_src = src;
1320#endif
1321
1322      SUBST (*loc, set);
1323
1324      while (GET_CODE (inner_dest) == STRICT_LOW_PART
1325	     || GET_CODE (inner_dest) == SUBREG
1326	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
1327	inner_dest = XEXP (inner_dest, 0);
1328
1329  /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1330     was added.  */
1331#if 0
1332      while (GET_CODE (inner_src) == STRICT_LOW_PART
1333	     || GET_CODE (inner_src) == SUBREG
1334	     || GET_CODE (inner_src) == ZERO_EXTRACT)
1335	inner_src = XEXP (inner_src, 0);
1336
1337      /* If it is better that two different modes keep two different pseudos,
1338	 avoid combining them.  This avoids producing the following pattern
1339	 on a 386:
1340	  (set (subreg:SI (reg/v:QI 21) 0)
1341	       (lshiftrt:SI (reg/v:SI 20)
1342	           (const_int 24)))
1343	 If that were made, reload could not handle the pair of
1344	 reg 20/21, since it would try to get any GENERAL_REGS
1345	 but some of them don't handle QImode.  */
1346
1347      if (rtx_equal_p (inner_src, i2dest)
1348	  && GET_CODE (inner_dest) == REG
1349	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1350	return 0;
1351#endif
1352
1353      /* Check for the case where I3 modifies its output, as
1354	 discussed above.  */
1355      if ((inner_dest != dest
1356	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
1357	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1358
1359	  /* This is the same test done in can_combine_p except we can't test
1360	     all_adjacent; we don't have to, since this instruction will stay
1361	     in place, thus we are not considering increasing the lifetime of
1362	     INNER_DEST.
1363
1364	     Also, if this insn sets a function argument, combining it with
1365	     something that might need a spill could clobber a previous
1366	     function argument; the all_adjacent test in can_combine_p also
1367	     checks this; here, we do a more specific test for this case.  */
1368
1369	  || (GET_CODE (inner_dest) == REG
1370	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1371	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1372					GET_MODE (inner_dest))))
1373	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1374	return 0;
1375
1376      /* If DEST is used in I3, it is being killed in this insn,
1377	 so record that for later.
1378	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1379	 STACK_POINTER_REGNUM, since these are always considered to be
1380	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
1381      if (pi3dest_killed && GET_CODE (dest) == REG
1382	  && reg_referenced_p (dest, PATTERN (i3))
1383	  && REGNO (dest) != FRAME_POINTER_REGNUM
1384#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1385	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1386#endif
1387#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1388	  && (REGNO (dest) != ARG_POINTER_REGNUM
1389	      || ! fixed_regs [REGNO (dest)])
1390#endif
1391	  && REGNO (dest) != STACK_POINTER_REGNUM)
1392	{
1393	  if (*pi3dest_killed)
1394	    return 0;
1395
1396	  *pi3dest_killed = dest;
1397	}
1398    }
1399
1400  else if (GET_CODE (x) == PARALLEL)
1401    {
1402      int i;
1403
1404      for (i = 0; i < XVECLEN (x, 0); i++)
1405	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1406				i1_not_in_src, pi3dest_killed))
1407	  return 0;
1408    }
1409
1410  return 1;
1411}
1412
1413/* Return 1 if X is an arithmetic expression that contains a multiplication
1414   and division.  We don't count multiplications by powers of two here.  */
1415
1416static int
1417contains_muldiv (x)
1418     rtx x;
1419{
1420  switch (GET_CODE (x))
1421    {
1422    case MOD:  case DIV:  case UMOD:  case UDIV:
1423      return 1;
1424
1425    case MULT:
1426      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1427		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1428    default:
1429      switch (GET_RTX_CLASS (GET_CODE (x)))
1430	{
1431	case 'c':  case '<':  case '2':
1432	  return contains_muldiv (XEXP (x, 0))
1433	    || contains_muldiv (XEXP (x, 1));
1434
1435	case '1':
1436	  return contains_muldiv (XEXP (x, 0));
1437
1438	default:
1439	  return 0;
1440	}
1441    }
1442}
1443
1444/* Determine whether INSN can be used in a combination.  Return nonzero if
1445   not.  This is used in try_combine to detect early some cases where we
1446   can't perform combinations.  */
1447
1448static int
1449cant_combine_insn_p (insn)
1450     rtx insn;
1451{
1452  rtx set;
1453  rtx src, dest;
1454
1455  /* If this isn't really an insn, we can't do anything.
1456     This can occur when flow deletes an insn that it has merged into an
1457     auto-increment address.  */
1458  if (! INSN_P (insn))
1459    return 1;
1460
1461  /* Never combine loads and stores involving hard regs.  The register
1462     allocator can usually handle such reg-reg moves by tying.  If we allow
1463     the combiner to make substitutions of hard regs, we risk aborting in
1464     reload on machines that have SMALL_REGISTER_CLASSES.
1465     As an exception, we allow combinations involving fixed regs; these are
1466     not available to the register allocator so there's no risk involved.  */
1467
1468  set = single_set (insn);
1469  if (! set)
1470    return 0;
1471  src = SET_SRC (set);
1472  dest = SET_DEST (set);
1473  if (GET_CODE (src) == SUBREG)
1474    src = SUBREG_REG (src);
1475  if (GET_CODE (dest) == SUBREG)
1476    dest = SUBREG_REG (dest);
1477  if (REG_P (src) && REG_P (dest)
1478      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1479	   && ! fixed_regs[REGNO (src)])
1480	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1481	      && ! fixed_regs[REGNO (dest)])))
1482    return 1;
1483
1484  return 0;
1485}
1486
1487/* Try to combine the insns I1 and I2 into I3.
1488   Here I1 and I2 appear earlier than I3.
1489   I1 can be zero; then we combine just I2 into I3.
1490
1491   If we are combining three insns and the resulting insn is not recognized,
1492   try splitting it into two insns.  If that happens, I2 and I3 are retained
1493   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
1494   are pseudo-deleted.
1495
1496   Return 0 if the combination does not work.  Then nothing is changed.
1497   If we did the combination, return the insn at which combine should
1498   resume scanning.
1499
1500   Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
1501   new direct jump instruction.  */
1502
1503static rtx
1504try_combine (i3, i2, i1, new_direct_jump_p)
1505     rtx i3, i2, i1;
1506     int *new_direct_jump_p;
1507{
1508  /* New patterns for I3 and I2, respectively.  */
1509  rtx newpat, newi2pat = 0;
1510  int substed_i2 = 0, substed_i1 = 0;
1511  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
1512  int added_sets_1, added_sets_2;
1513  /* Total number of SETs to put into I3.  */
1514  int total_sets;
1515  /* Nonzero is I2's body now appears in I3.  */
1516  int i2_is_used;
1517  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
1518  int insn_code_number, i2_code_number = 0, other_code_number = 0;
1519  /* Contains I3 if the destination of I3 is used in its source, which means
1520     that the old life of I3 is being killed.  If that usage is placed into
1521     I2 and not in I3, a REG_DEAD note must be made.  */
1522  rtx i3dest_killed = 0;
1523  /* SET_DEST and SET_SRC of I2 and I1.  */
1524  rtx i2dest, i2src, i1dest = 0, i1src = 0;
1525  /* PATTERN (I2), or a copy of it in certain cases.  */
1526  rtx i2pat;
1527  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
1528  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1529  int i1_feeds_i3 = 0;
1530  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1531  rtx new_i3_notes, new_i2_notes;
1532  /* Notes that we substituted I3 into I2 instead of the normal case.  */
1533  int i3_subst_into_i2 = 0;
1534  /* Notes that I1, I2 or I3 is a MULT operation.  */
1535  int have_mult = 0;
1536
1537  int maxreg;
1538  rtx temp;
1539  rtx link;
1540  int i;
1541
1542  /* Exit early if one of the insns involved can't be used for
1543     combinations.  */
1544  if (cant_combine_insn_p (i3)
1545      || cant_combine_insn_p (i2)
1546      || (i1 && cant_combine_insn_p (i1))
1547      /* We also can't do anything if I3 has a
1548	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1549	 libcall.  */
1550#if 0
1551      /* ??? This gives worse code, and appears to be unnecessary, since no
1552	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1553      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1554#endif
1555      )
1556    return 0;
1557
1558  combine_attempts++;
1559  undobuf.other_insn = 0;
1560
1561  /* Reset the hard register usage information.  */
1562  CLEAR_HARD_REG_SET (newpat_used_regs);
1563
1564  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1565     code below, set I1 to be the earlier of the two insns.  */
1566  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1567    temp = i1, i1 = i2, i2 = temp;
1568
1569  added_links_insn = 0;
1570
1571  /* First check for one important special-case that the code below will
1572     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1573     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1574     we may be able to replace that destination with the destination of I3.
1575     This occurs in the common code where we compute both a quotient and
1576     remainder into a structure, in which case we want to do the computation
1577     directly into the structure to avoid register-register copies.
1578
1579     Note that this case handles both multiple sets in I2 and also
1580     cases where I2 has a number of CLOBBER or PARALLELs.
1581
1582     We make very conservative checks below and only try to handle the
1583     most common cases of this.  For example, we only handle the case
1584     where I2 and I3 are adjacent to avoid making difficult register
1585     usage tests.  */
1586
1587  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1588      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1589      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1590      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1591      && GET_CODE (PATTERN (i2)) == PARALLEL
1592      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1593      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1594	 below would need to check what is inside (and reg_overlap_mentioned_p
1595	 doesn't support those codes anyway).  Don't allow those destinations;
1596	 the resulting insn isn't likely to be recognized anyway.  */
1597      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1598      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1599      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1600				    SET_DEST (PATTERN (i3)))
1601      && next_real_insn (i2) == i3)
1602    {
1603      rtx p2 = PATTERN (i2);
1604
1605      /* Make sure that the destination of I3,
1606	 which we are going to substitute into one output of I2,
1607	 is not used within another output of I2.  We must avoid making this:
1608	 (parallel [(set (mem (reg 69)) ...)
1609		    (set (reg 69) ...)])
1610	 which is not well-defined as to order of actions.
1611	 (Besides, reload can't handle output reloads for this.)
1612
1613	 The problem can also happen if the dest of I3 is a memory ref,
1614	 if another dest in I2 is an indirect memory ref.  */
1615      for (i = 0; i < XVECLEN (p2, 0); i++)
1616	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1617	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1618	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1619					SET_DEST (XVECEXP (p2, 0, i))))
1620	  break;
1621
1622      if (i == XVECLEN (p2, 0))
1623	for (i = 0; i < XVECLEN (p2, 0); i++)
1624	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1625	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1626	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1627	    {
1628	      combine_merges++;
1629
1630	      subst_insn = i3;
1631	      subst_low_cuid = INSN_CUID (i2);
1632
1633	      added_sets_2 = added_sets_1 = 0;
1634	      i2dest = SET_SRC (PATTERN (i3));
1635
1636	      /* Replace the dest in I2 with our dest and make the resulting
1637		 insn the new pattern for I3.  Then skip to where we
1638		 validate the pattern.  Everything was set up above.  */
1639	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1640		     SET_DEST (PATTERN (i3)));
1641
1642	      newpat = p2;
1643	      i3_subst_into_i2 = 1;
1644	      goto validate_replacement;
1645	    }
1646    }
1647
1648  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1649     one of those words to another constant, merge them by making a new
1650     constant.  */
1651  if (i1 == 0
1652      && (temp = single_set (i2)) != 0
1653      && (GET_CODE (SET_SRC (temp)) == CONST_INT
1654	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1655      && GET_CODE (SET_DEST (temp)) == REG
1656      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1657      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1658      && GET_CODE (PATTERN (i3)) == SET
1659      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1660      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1661      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1662      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1663      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1664    {
1665      HOST_WIDE_INT lo, hi;
1666
1667      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1668	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1669      else
1670	{
1671	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1672	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1673	}
1674
1675      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1676	{
1677	  /* We don't handle the case of the target word being wider
1678	     than a host wide int.  */
1679	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1680	    abort ();
1681
1682	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1683	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1684		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1685	}
1686      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1687	hi = INTVAL (SET_SRC (PATTERN (i3)));
1688      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1689	{
1690	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1691			     >> (HOST_BITS_PER_WIDE_INT - 1));
1692
1693	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1694		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1695	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1696		 (INTVAL (SET_SRC (PATTERN (i3)))));
1697	  if (hi == sign)
1698	    hi = lo < 0 ? -1 : 0;
1699	}
1700      else
1701	/* We don't handle the case of the higher word not fitting
1702	   entirely in either hi or lo.  */
1703	abort ();
1704
1705      combine_merges++;
1706      subst_insn = i3;
1707      subst_low_cuid = INSN_CUID (i2);
1708      added_sets_2 = added_sets_1 = 0;
1709      i2dest = SET_DEST (temp);
1710
1711      SUBST (SET_SRC (temp),
1712	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1713
1714      newpat = PATTERN (i2);
1715      goto validate_replacement;
1716    }
1717
1718#ifndef HAVE_cc0
1719  /* If we have no I1 and I2 looks like:
1720	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1721		   (set Y OP)])
1722     make up a dummy I1 that is
1723	(set Y OP)
1724     and change I2 to be
1725        (set (reg:CC X) (compare:CC Y (const_int 0)))
1726
1727     (We can ignore any trailing CLOBBERs.)
1728
1729     This undoes a previous combination and allows us to match a branch-and-
1730     decrement insn.  */
1731
1732  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1733      && XVECLEN (PATTERN (i2), 0) >= 2
1734      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1735      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1736	  == MODE_CC)
1737      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1738      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1739      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1740      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1741      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1742		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1743    {
1744      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1745	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1746	  break;
1747
1748      if (i == 1)
1749	{
1750	  /* We make I1 with the same INSN_UID as I2.  This gives it
1751	     the same INSN_CUID for value tracking.  Our fake I1 will
1752	     never appear in the insn stream so giving it the same INSN_UID
1753	     as I2 will not cause a problem.  */
1754
1755	  subst_prev_insn = i1
1756	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1757			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1758			    NULL_RTX);
1759
1760	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1761	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1762		 SET_DEST (PATTERN (i1)));
1763	}
1764    }
1765#endif
1766
1767  /* Verify that I2 and I1 are valid for combining.  */
1768  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1769      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1770    {
1771      undo_all ();
1772      return 0;
1773    }
1774
1775  /* Record whether I2DEST is used in I2SRC and similarly for the other
1776     cases.  Knowing this will help in register status updating below.  */
1777  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1778  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1779  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1780
1781  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1782     in I2SRC.  */
1783  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1784
1785  /* Ensure that I3's pattern can be the destination of combines.  */
1786  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1787			  i1 && i2dest_in_i1src && i1_feeds_i3,
1788			  &i3dest_killed))
1789    {
1790      undo_all ();
1791      return 0;
1792    }
1793
1794  /* See if any of the insns is a MULT operation.  Unless one is, we will
1795     reject a combination that is, since it must be slower.  Be conservative
1796     here.  */
1797  if (GET_CODE (i2src) == MULT
1798      || (i1 != 0 && GET_CODE (i1src) == MULT)
1799      || (GET_CODE (PATTERN (i3)) == SET
1800	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1801    have_mult = 1;
1802
1803  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1804     We used to do this EXCEPT in one case: I3 has a post-inc in an
1805     output operand.  However, that exception can give rise to insns like
1806	mov r3,(r3)+
1807     which is a famous insn on the PDP-11 where the value of r3 used as the
1808     source was model-dependent.  Avoid this sort of thing.  */
1809
1810#if 0
1811  if (!(GET_CODE (PATTERN (i3)) == SET
1812	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1813	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1814	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1815	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1816    /* It's not the exception.  */
1817#endif
1818#ifdef AUTO_INC_DEC
1819    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1820      if (REG_NOTE_KIND (link) == REG_INC
1821	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1822	      || (i1 != 0
1823		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1824	{
1825	  undo_all ();
1826	  return 0;
1827	}
1828#endif
1829
1830  /* See if the SETs in I1 or I2 need to be kept around in the merged
1831     instruction: whenever the value set there is still needed past I3.
1832     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1833
1834     For the SET in I1, we have two cases:  If I1 and I2 independently
1835     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1836     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1837     in I1 needs to be kept around unless I1DEST dies or is set in either
1838     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1839     I1DEST.  If so, we know I1 feeds into I2.  */
1840
1841  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1842
1843  added_sets_1
1844    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1845	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1846
1847  /* If the set in I2 needs to be kept around, we must make a copy of
1848     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1849     PATTERN (I2), we are only substituting for the original I1DEST, not into
1850     an already-substituted copy.  This also prevents making self-referential
1851     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1852     I2DEST.  */
1853
1854  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1855	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1856	   : PATTERN (i2));
1857
1858  if (added_sets_2)
1859    i2pat = copy_rtx (i2pat);
1860
1861  combine_merges++;
1862
1863  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1864
1865  maxreg = max_reg_num ();
1866
1867  subst_insn = i3;
1868
1869  /* It is possible that the source of I2 or I1 may be performing an
1870     unneeded operation, such as a ZERO_EXTEND of something that is known
1871     to have the high part zero.  Handle that case by letting subst look at
1872     the innermost one of them.
1873
1874     Another way to do this would be to have a function that tries to
1875     simplify a single insn instead of merging two or more insns.  We don't
1876     do this because of the potential of infinite loops and because
1877     of the potential extra memory required.  However, doing it the way
1878     we are is a bit of a kludge and doesn't catch all cases.
1879
1880     But only do this if -fexpensive-optimizations since it slows things down
1881     and doesn't usually win.  */
1882
1883  if (flag_expensive_optimizations)
1884    {
1885      /* Pass pc_rtx so no substitutions are done, just simplifications.
1886	 The cases that we are interested in here do not involve the few
1887	 cases were is_replaced is checked.  */
1888      if (i1)
1889	{
1890	  subst_low_cuid = INSN_CUID (i1);
1891	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1892	}
1893      else
1894	{
1895	  subst_low_cuid = INSN_CUID (i2);
1896	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1897	}
1898    }
1899
1900#ifndef HAVE_cc0
1901  /* Many machines that don't use CC0 have insns that can both perform an
1902     arithmetic operation and set the condition code.  These operations will
1903     be represented as a PARALLEL with the first element of the vector
1904     being a COMPARE of an arithmetic operation with the constant zero.
1905     The second element of the vector will set some pseudo to the result
1906     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1907     match such a pattern and so will generate an extra insn.   Here we test
1908     for this case, where both the comparison and the operation result are
1909     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1910     I2SRC.  Later we will make the PARALLEL that contains I2.  */
1911
1912  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1913      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1914      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1915      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1916    {
1917#ifdef EXTRA_CC_MODES
1918      rtx *cc_use;
1919      enum machine_mode compare_mode;
1920#endif
1921
1922      newpat = PATTERN (i3);
1923      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1924
1925      i2_is_used = 1;
1926
1927#ifdef EXTRA_CC_MODES
1928      /* See if a COMPARE with the operand we substituted in should be done
1929	 with the mode that is currently being used.  If not, do the same
1930	 processing we do in `subst' for a SET; namely, if the destination
1931	 is used only once, try to replace it with a register of the proper
1932	 mode and also replace the COMPARE.  */
1933      if (undobuf.other_insn == 0
1934	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1935					&undobuf.other_insn))
1936	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1937					      i2src, const0_rtx))
1938	      != GET_MODE (SET_DEST (newpat))))
1939	{
1940	  unsigned int regno = REGNO (SET_DEST (newpat));
1941	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1942
1943	  if (regno < FIRST_PSEUDO_REGISTER
1944	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1945		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1946	    {
1947	      if (regno >= FIRST_PSEUDO_REGISTER)
1948		SUBST (regno_reg_rtx[regno], new_dest);
1949
1950	      SUBST (SET_DEST (newpat), new_dest);
1951	      SUBST (XEXP (*cc_use, 0), new_dest);
1952	      SUBST (SET_SRC (newpat),
1953		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
1954	    }
1955	  else
1956	    undobuf.other_insn = 0;
1957	}
1958#endif
1959    }
1960  else
1961#endif
1962    {
1963      n_occurrences = 0;		/* `subst' counts here */
1964
1965      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1966	 need to make a unique copy of I2SRC each time we substitute it
1967	 to avoid self-referential rtl.  */
1968
1969      subst_low_cuid = INSN_CUID (i2);
1970      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1971		      ! i1_feeds_i3 && i1dest_in_i1src);
1972      substed_i2 = 1;
1973
1974      /* Record whether i2's body now appears within i3's body.  */
1975      i2_is_used = n_occurrences;
1976    }
1977
1978  /* If we already got a failure, don't try to do more.  Otherwise,
1979     try to substitute in I1 if we have it.  */
1980
1981  if (i1 && GET_CODE (newpat) != CLOBBER)
1982    {
1983      /* Before we can do this substitution, we must redo the test done
1984	 above (see detailed comments there) that ensures  that I1DEST
1985	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1986
1987      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1988			      0, (rtx*) 0))
1989	{
1990	  undo_all ();
1991	  return 0;
1992	}
1993
1994      n_occurrences = 0;
1995      subst_low_cuid = INSN_CUID (i1);
1996      newpat = subst (newpat, i1dest, i1src, 0, 0);
1997      substed_i1 = 1;
1998    }
1999
2000  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
2001     to count all the ways that I2SRC and I1SRC can be used.  */
2002  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2003       && i2_is_used + added_sets_2 > 1)
2004      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2005	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2006	      > 1))
2007      /* Fail if we tried to make a new register (we used to abort, but there's
2008	 really no reason to).  */
2009      || max_reg_num () != maxreg
2010      /* Fail if we couldn't do something and have a CLOBBER.  */
2011      || GET_CODE (newpat) == CLOBBER
2012      /* Fail if this new pattern is a MULT and we didn't have one before
2013	 at the outer level.  */
2014      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2015	  && ! have_mult))
2016    {
2017      undo_all ();
2018      return 0;
2019    }
2020
2021  /* If the actions of the earlier insns must be kept
2022     in addition to substituting them into the latest one,
2023     we must make a new PARALLEL for the latest insn
2024     to hold additional the SETs.  */
2025
2026  if (added_sets_1 || added_sets_2)
2027    {
2028      combine_extras++;
2029
2030      if (GET_CODE (newpat) == PARALLEL)
2031	{
2032	  rtvec old = XVEC (newpat, 0);
2033	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2034	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2035	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2036		  sizeof (old->elem[0]) * old->num_elem);
2037	}
2038      else
2039	{
2040	  rtx old = newpat;
2041	  total_sets = 1 + added_sets_1 + added_sets_2;
2042	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2043	  XVECEXP (newpat, 0, 0) = old;
2044	}
2045
2046      if (added_sets_1)
2047	XVECEXP (newpat, 0, --total_sets)
2048	  = (GET_CODE (PATTERN (i1)) == PARALLEL
2049	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2050
2051      if (added_sets_2)
2052	{
2053	  /* If there is no I1, use I2's body as is.  We used to also not do
2054	     the subst call below if I2 was substituted into I3,
2055	     but that could lose a simplification.  */
2056	  if (i1 == 0)
2057	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2058	  else
2059	    /* See comment where i2pat is assigned.  */
2060	    XVECEXP (newpat, 0, --total_sets)
2061	      = subst (i2pat, i1dest, i1src, 0, 0);
2062	}
2063    }
2064
2065  /* We come here when we are replacing a destination in I2 with the
2066     destination of I3.  */
2067 validate_replacement:
2068
2069  /* Note which hard regs this insn has as inputs.  */
2070  mark_used_regs_combine (newpat);
2071
2072  /* Is the result of combination a valid instruction?  */
2073  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2074
2075  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2076     the second SET's destination is a register that is unused.  In that case,
2077     we just need the first SET.   This can occur when simplifying a divmod
2078     insn.  We *must* test for this case here because the code below that
2079     splits two independent SETs doesn't handle this case correctly when it
2080     updates the register status.  Also check the case where the first
2081     SET's destination is unused.  That would not cause incorrect code, but
2082     does cause an unneeded insn to remain.  */
2083
2084  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2085      && XVECLEN (newpat, 0) == 2
2086      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2087      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2088      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2089      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2090      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2091      && asm_noperands (newpat) < 0)
2092    {
2093      newpat = XVECEXP (newpat, 0, 0);
2094      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2095    }
2096
2097  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2098	   && XVECLEN (newpat, 0) == 2
2099	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2100	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2101	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2102	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2103	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2104	   && asm_noperands (newpat) < 0)
2105    {
2106      newpat = XVECEXP (newpat, 0, 1);
2107      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2108    }
2109
2110  /* If we were combining three insns and the result is a simple SET
2111     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2112     insns.  There are two ways to do this.  It can be split using a
2113     machine-specific method (like when you have an addition of a large
2114     constant) or by combine in the function find_split_point.  */
2115
2116  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2117      && asm_noperands (newpat) < 0)
2118    {
2119      rtx m_split, *split;
2120      rtx ni2dest = i2dest;
2121
2122      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2123	 use I2DEST as a scratch register will help.  In the latter case,
2124	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
2125
2126      m_split = split_insns (newpat, i3);
2127
2128      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2129	 inputs of NEWPAT.  */
2130
2131      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2132	 possible to try that as a scratch reg.  This would require adding
2133	 more code to make it work though.  */
2134
2135      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2136	{
2137	  /* If I2DEST is a hard register or the only use of a pseudo,
2138	     we can change its mode.  */
2139	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2140	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
2141	      && GET_CODE (i2dest) == REG
2142	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2143		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2144		      && ! REG_USERVAR_P (i2dest))))
2145	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2146				   REGNO (i2dest));
2147
2148	  m_split = split_insns (gen_rtx_PARALLEL
2149				 (VOIDmode,
2150				  gen_rtvec (2, newpat,
2151					     gen_rtx_CLOBBER (VOIDmode,
2152							      ni2dest))),
2153				 i3);
2154	  /* If the split with the mode-changed register didn't work, try
2155	     the original register.  */
2156	  if (! m_split && ni2dest != i2dest)
2157	    {
2158	      ni2dest = i2dest;
2159	      m_split = split_insns (gen_rtx_PARALLEL
2160				     (VOIDmode,
2161				      gen_rtvec (2, newpat,
2162						 gen_rtx_CLOBBER (VOIDmode,
2163								  i2dest))),
2164				     i3);
2165	    }
2166	}
2167
2168      /* If we've split a jump pattern, we'll wind up with a sequence even
2169	 with one instruction.  We can handle that below, so extract it.  */
2170      if (m_split && GET_CODE (m_split) == SEQUENCE
2171	  && XVECLEN (m_split, 0) == 1)
2172	m_split = PATTERN (XVECEXP (m_split, 0, 0));
2173
2174      if (m_split && GET_CODE (m_split) != SEQUENCE)
2175	{
2176	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2177	  if (insn_code_number >= 0)
2178	    newpat = m_split;
2179	}
2180      else if (m_split && GET_CODE (m_split) == SEQUENCE
2181	       && XVECLEN (m_split, 0) == 2
2182	       && (next_real_insn (i2) == i3
2183		   || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
2184					   INSN_CUID (i2))))
2185	{
2186	  rtx i2set, i3set;
2187	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
2188	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
2189
2190	  i3set = single_set (XVECEXP (m_split, 0, 1));
2191	  i2set = single_set (XVECEXP (m_split, 0, 0));
2192
2193	  /* In case we changed the mode of I2DEST, replace it in the
2194	     pseudo-register table here.  We can't do it above in case this
2195	     code doesn't get executed and we do a split the other way.  */
2196
2197	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2198	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2199
2200	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2201
2202	  /* If I2 or I3 has multiple SETs, we won't know how to track
2203	     register status, so don't use these insns.  If I2's destination
2204	     is used between I2 and I3, we also can't use these insns.  */
2205
2206	  if (i2_code_number >= 0 && i2set && i3set
2207	      && (next_real_insn (i2) == i3
2208		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2209	    insn_code_number = recog_for_combine (&newi3pat, i3,
2210						  &new_i3_notes);
2211	  if (insn_code_number >= 0)
2212	    newpat = newi3pat;
2213
2214	  /* It is possible that both insns now set the destination of I3.
2215	     If so, we must show an extra use of it.  */
2216
2217	  if (insn_code_number >= 0)
2218	    {
2219	      rtx new_i3_dest = SET_DEST (i3set);
2220	      rtx new_i2_dest = SET_DEST (i2set);
2221
2222	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2223		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2224		     || GET_CODE (new_i3_dest) == SUBREG)
2225		new_i3_dest = XEXP (new_i3_dest, 0);
2226
2227	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2228		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2229		     || GET_CODE (new_i2_dest) == SUBREG)
2230		new_i2_dest = XEXP (new_i2_dest, 0);
2231
2232	      if (GET_CODE (new_i3_dest) == REG
2233		  && GET_CODE (new_i2_dest) == REG
2234		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2235		REG_N_SETS (REGNO (new_i2_dest))++;
2236	    }
2237	}
2238
2239      /* If we can split it and use I2DEST, go ahead and see if that
2240	 helps things be recognized.  Verify that none of the registers
2241	 are set between I2 and I3.  */
2242      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2243#ifdef HAVE_cc0
2244	  && GET_CODE (i2dest) == REG
2245#endif
2246	  /* We need I2DEST in the proper mode.  If it is a hard register
2247	     or the only use of a pseudo, we can change its mode.  */
2248	  && (GET_MODE (*split) == GET_MODE (i2dest)
2249	      || GET_MODE (*split) == VOIDmode
2250	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2251	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2252		  && ! REG_USERVAR_P (i2dest)))
2253	  && (next_real_insn (i2) == i3
2254	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2255	  /* We can't overwrite I2DEST if its value is still used by
2256	     NEWPAT.  */
2257	  && ! reg_referenced_p (i2dest, newpat))
2258	{
2259	  rtx newdest = i2dest;
2260	  enum rtx_code split_code = GET_CODE (*split);
2261	  enum machine_mode split_mode = GET_MODE (*split);
2262
2263	  /* Get NEWDEST as a register in the proper mode.  We have already
2264	     validated that we can do this.  */
2265	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2266	    {
2267	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2268
2269	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2270		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2271	    }
2272
2273	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2274	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2275	     appeared to be a memory address.  This is a kludge.  */
2276	  if (split_code == MULT
2277	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2278	      && INTVAL (XEXP (*split, 1)) > 0
2279	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2280	    {
2281	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2282					     XEXP (*split, 0), GEN_INT (i)));
2283	      /* Update split_code because we may not have a multiply
2284		 anymore.  */
2285	      split_code = GET_CODE (*split);
2286	    }
2287
2288#ifdef INSN_SCHEDULING
2289	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2290	     be written as a ZERO_EXTEND.  */
2291	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2292	    SUBST (*split, gen_rtx_ZERO_EXTEND  (split_mode,
2293						 SUBREG_REG (*split)));
2294#endif
2295
2296	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2297	  SUBST (*split, newdest);
2298	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2299
2300	  /* If the split point was a MULT and we didn't have one before,
2301	     don't use one now.  */
2302	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2303	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2304	}
2305    }
2306
2307  /* Check for a case where we loaded from memory in a narrow mode and
2308     then sign extended it, but we need both registers.  In that case,
2309     we have a PARALLEL with both loads from the same memory location.
2310     We can split this into a load from memory followed by a register-register
2311     copy.  This saves at least one insn, more if register allocation can
2312     eliminate the copy.
2313
2314     We cannot do this if the destination of the second assignment is
2315     a register that we have already assumed is zero-extended.  Similarly
2316     for a SUBREG of such a register.  */
2317
2318  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2319	   && GET_CODE (newpat) == PARALLEL
2320	   && XVECLEN (newpat, 0) == 2
2321	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2322	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2323	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2324	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2325			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2326	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2327				   INSN_CUID (i2))
2328	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2329	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2330	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2331		 (GET_CODE (temp) == REG
2332		  && reg_nonzero_bits[REGNO (temp)] != 0
2333		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2334		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2335		  && (reg_nonzero_bits[REGNO (temp)]
2336		      != GET_MODE_MASK (word_mode))))
2337	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2338		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2339		     (GET_CODE (temp) == REG
2340		      && reg_nonzero_bits[REGNO (temp)] != 0
2341		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2342		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2343		      && (reg_nonzero_bits[REGNO (temp)]
2344			  != GET_MODE_MASK (word_mode)))))
2345	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2346					 SET_SRC (XVECEXP (newpat, 0, 1)))
2347	   && ! find_reg_note (i3, REG_UNUSED,
2348			       SET_DEST (XVECEXP (newpat, 0, 0))))
2349    {
2350      rtx ni2dest;
2351
2352      newi2pat = XVECEXP (newpat, 0, 0);
2353      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2354      newpat = XVECEXP (newpat, 0, 1);
2355      SUBST (SET_SRC (newpat),
2356	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2357      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2358
2359      if (i2_code_number >= 0)
2360	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2361
2362      if (insn_code_number >= 0)
2363	{
2364	  rtx insn;
2365	  rtx link;
2366
2367	  /* If we will be able to accept this, we have made a change to the
2368	     destination of I3.  This can invalidate a LOG_LINKS pointing
2369	     to I3.  No other part of combine.c makes such a transformation.
2370
2371	     The new I3 will have a destination that was previously the
2372	     destination of I1 or I2 and which was used in i2 or I3.  Call
2373	     distribute_links to make a LOG_LINK from the next use of
2374	     that destination.  */
2375
2376	  PATTERN (i3) = newpat;
2377	  distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2378
2379	  /* I3 now uses what used to be its destination and which is
2380	     now I2's destination.  That means we need a LOG_LINK from
2381	     I3 to I2.  But we used to have one, so we still will.
2382
2383	     However, some later insn might be using I2's dest and have
2384	     a LOG_LINK pointing at I3.  We must remove this link.
2385	     The simplest way to remove the link is to point it at I1,
2386	     which we know will be a NOTE.  */
2387
2388	  for (insn = NEXT_INSN (i3);
2389	       insn && (this_basic_block == n_basic_blocks - 1
2390			|| insn != BLOCK_HEAD (this_basic_block + 1));
2391	       insn = NEXT_INSN (insn))
2392	    {
2393	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2394		{
2395		  for (link = LOG_LINKS (insn); link;
2396		       link = XEXP (link, 1))
2397		    if (XEXP (link, 0) == i3)
2398		      XEXP (link, 0) = i1;
2399
2400		  break;
2401		}
2402	    }
2403	}
2404    }
2405
2406  /* Similarly, check for a case where we have a PARALLEL of two independent
2407     SETs but we started with three insns.  In this case, we can do the sets
2408     as two separate insns.  This case occurs when some SET allows two
2409     other insns to combine, but the destination of that SET is still live.  */
2410
2411  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2412	   && GET_CODE (newpat) == PARALLEL
2413	   && XVECLEN (newpat, 0) == 2
2414	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2415	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2416	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2417	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2418	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2419	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2420	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2421				   INSN_CUID (i2))
2422	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2423	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2424	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2425	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2426				  XVECEXP (newpat, 0, 0))
2427	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2428				  XVECEXP (newpat, 0, 1))
2429	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2430		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2431    {
2432      /* Normally, it doesn't matter which of the two is done first,
2433	 but it does if one references cc0.  In that case, it has to
2434	 be first.  */
2435#ifdef HAVE_cc0
2436      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2437	{
2438	  newi2pat = XVECEXP (newpat, 0, 0);
2439	  newpat = XVECEXP (newpat, 0, 1);
2440	}
2441      else
2442#endif
2443	{
2444	  newi2pat = XVECEXP (newpat, 0, 1);
2445	  newpat = XVECEXP (newpat, 0, 0);
2446	}
2447
2448      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2449
2450      if (i2_code_number >= 0)
2451	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2452    }
2453
2454  /* If it still isn't recognized, fail and change things back the way they
2455     were.  */
2456  if ((insn_code_number < 0
2457       /* Is the result a reasonable ASM_OPERANDS?  */
2458       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2459    {
2460      undo_all ();
2461      return 0;
2462    }
2463
2464  /* If we had to change another insn, make sure it is valid also.  */
2465  if (undobuf.other_insn)
2466    {
2467      rtx other_pat = PATTERN (undobuf.other_insn);
2468      rtx new_other_notes;
2469      rtx note, next;
2470
2471      CLEAR_HARD_REG_SET (newpat_used_regs);
2472
2473      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2474					     &new_other_notes);
2475
2476      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2477	{
2478	  undo_all ();
2479	  return 0;
2480	}
2481
2482      PATTERN (undobuf.other_insn) = other_pat;
2483
2484      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2485	 are still valid.  Then add any non-duplicate notes added by
2486	 recog_for_combine.  */
2487      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2488	{
2489	  next = XEXP (note, 1);
2490
2491	  if (REG_NOTE_KIND (note) == REG_UNUSED
2492	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2493	    {
2494	      if (GET_CODE (XEXP (note, 0)) == REG)
2495		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2496
2497	      remove_note (undobuf.other_insn, note);
2498	    }
2499	}
2500
2501      for (note = new_other_notes; note; note = XEXP (note, 1))
2502	if (GET_CODE (XEXP (note, 0)) == REG)
2503	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2504
2505      distribute_notes (new_other_notes, undobuf.other_insn,
2506			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2507    }
2508#ifdef HAVE_cc0
2509  /* If I2 is the setter CC0 and I3 is the user CC0 then check whether
2510     they are adjacent to each other or not.  */
2511  {
2512    rtx p = prev_nonnote_insn (i3);
2513    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2514	&& sets_cc0_p (newi2pat))
2515      {
2516	undo_all ();
2517	return 0;
2518      }
2519  }
2520#endif
2521
2522  /* We now know that we can do this combination.  Merge the insns and
2523     update the status of registers and LOG_LINKS.  */
2524
2525  {
2526    rtx i3notes, i2notes, i1notes = 0;
2527    rtx i3links, i2links, i1links = 0;
2528    rtx midnotes = 0;
2529    unsigned int regno;
2530    /* Compute which registers we expect to eliminate.  newi2pat may be setting
2531       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
2532       same as i3dest, in which case newi2pat may be setting i1dest.  */
2533    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2534		   || i2dest_in_i2src || i2dest_in_i1src
2535		   ? 0 : i2dest);
2536    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2537		   || (newi2pat && reg_set_p (i1dest, newi2pat))
2538		   ? 0 : i1dest);
2539
2540    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2541       clear them.  */
2542    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2543    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2544    if (i1)
2545      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2546
2547    /* Ensure that we do not have something that should not be shared but
2548       occurs multiple times in the new insns.  Check this by first
2549       resetting all the `used' flags and then copying anything is shared.  */
2550
2551    reset_used_flags (i3notes);
2552    reset_used_flags (i2notes);
2553    reset_used_flags (i1notes);
2554    reset_used_flags (newpat);
2555    reset_used_flags (newi2pat);
2556    if (undobuf.other_insn)
2557      reset_used_flags (PATTERN (undobuf.other_insn));
2558
2559    i3notes = copy_rtx_if_shared (i3notes);
2560    i2notes = copy_rtx_if_shared (i2notes);
2561    i1notes = copy_rtx_if_shared (i1notes);
2562    newpat = copy_rtx_if_shared (newpat);
2563    newi2pat = copy_rtx_if_shared (newi2pat);
2564    if (undobuf.other_insn)
2565      reset_used_flags (PATTERN (undobuf.other_insn));
2566
2567    INSN_CODE (i3) = insn_code_number;
2568    PATTERN (i3) = newpat;
2569
2570    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2571      {
2572	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2573
2574	reset_used_flags (call_usage);
2575	call_usage = copy_rtx (call_usage);
2576
2577	if (substed_i2)
2578	  replace_rtx (call_usage, i2dest, i2src);
2579
2580	if (substed_i1)
2581	  replace_rtx (call_usage, i1dest, i1src);
2582
2583	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2584      }
2585
2586    if (undobuf.other_insn)
2587      INSN_CODE (undobuf.other_insn) = other_code_number;
2588
2589    /* We had one special case above where I2 had more than one set and
2590       we replaced a destination of one of those sets with the destination
2591       of I3.  In that case, we have to update LOG_LINKS of insns later
2592       in this basic block.  Note that this (expensive) case is rare.
2593
2594       Also, in this case, we must pretend that all REG_NOTEs for I2
2595       actually came from I3, so that REG_UNUSED notes from I2 will be
2596       properly handled.  */
2597
2598    if (i3_subst_into_i2)
2599      {
2600	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2601	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2602	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2603	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2604	      && ! find_reg_note (i2, REG_UNUSED,
2605				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2606	    for (temp = NEXT_INSN (i2);
2607		 temp && (this_basic_block == n_basic_blocks - 1
2608			  || BLOCK_HEAD (this_basic_block) != temp);
2609		 temp = NEXT_INSN (temp))
2610	      if (temp != i3 && INSN_P (temp))
2611		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2612		  if (XEXP (link, 0) == i2)
2613		    XEXP (link, 0) = i3;
2614
2615	if (i3notes)
2616	  {
2617	    rtx link = i3notes;
2618	    while (XEXP (link, 1))
2619	      link = XEXP (link, 1);
2620	    XEXP (link, 1) = i2notes;
2621	  }
2622	else
2623	  i3notes = i2notes;
2624	i2notes = 0;
2625      }
2626
2627    LOG_LINKS (i3) = 0;
2628    REG_NOTES (i3) = 0;
2629    LOG_LINKS (i2) = 0;
2630    REG_NOTES (i2) = 0;
2631
2632    if (newi2pat)
2633      {
2634	INSN_CODE (i2) = i2_code_number;
2635	PATTERN (i2) = newi2pat;
2636      }
2637    else
2638      {
2639	PUT_CODE (i2, NOTE);
2640	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2641	NOTE_SOURCE_FILE (i2) = 0;
2642      }
2643
2644    if (i1)
2645      {
2646	LOG_LINKS (i1) = 0;
2647	REG_NOTES (i1) = 0;
2648	PUT_CODE (i1, NOTE);
2649	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2650	NOTE_SOURCE_FILE (i1) = 0;
2651      }
2652
2653    /* Get death notes for everything that is now used in either I3 or
2654       I2 and used to die in a previous insn.  If we built two new
2655       patterns, move from I1 to I2 then I2 to I3 so that we get the
2656       proper movement on registers that I2 modifies.  */
2657
2658    if (newi2pat)
2659      {
2660	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2661	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2662      }
2663    else
2664      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2665		   i3, &midnotes);
2666
2667    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2668    if (i3notes)
2669      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2670			elim_i2, elim_i1);
2671    if (i2notes)
2672      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2673			elim_i2, elim_i1);
2674    if (i1notes)
2675      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2676			elim_i2, elim_i1);
2677    if (midnotes)
2678      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2679			elim_i2, elim_i1);
2680
2681    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2682       know these are REG_UNUSED and want them to go to the desired insn,
2683       so we always pass it as i3.  We have not counted the notes in
2684       reg_n_deaths yet, so we need to do so now.  */
2685
2686    if (newi2pat && new_i2_notes)
2687      {
2688	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2689	  if (GET_CODE (XEXP (temp, 0)) == REG)
2690	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2691
2692	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2693      }
2694
2695    if (new_i3_notes)
2696      {
2697	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2698	  if (GET_CODE (XEXP (temp, 0)) == REG)
2699	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2700
2701	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2702      }
2703
2704    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2705       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2706       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2707       in that case, it might delete I2.  Similarly for I2 and I1.
2708       Show an additional death due to the REG_DEAD note we make here.  If
2709       we discard it in distribute_notes, we will decrement it again.  */
2710
2711    if (i3dest_killed)
2712      {
2713	if (GET_CODE (i3dest_killed) == REG)
2714	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2715
2716	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2717	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2718					       NULL_RTX),
2719			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2720	else
2721	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2722					       NULL_RTX),
2723			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2724			    elim_i2, elim_i1);
2725      }
2726
2727    if (i2dest_in_i2src)
2728      {
2729	if (GET_CODE (i2dest) == REG)
2730	  REG_N_DEATHS (REGNO (i2dest))++;
2731
2732	if (newi2pat && reg_set_p (i2dest, newi2pat))
2733	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2734			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2735	else
2736	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2737			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2738			    NULL_RTX, NULL_RTX);
2739      }
2740
2741    if (i1dest_in_i1src)
2742      {
2743	if (GET_CODE (i1dest) == REG)
2744	  REG_N_DEATHS (REGNO (i1dest))++;
2745
2746	if (newi2pat && reg_set_p (i1dest, newi2pat))
2747	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2748			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2749	else
2750	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2751			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2752			    NULL_RTX, NULL_RTX);
2753      }
2754
2755    distribute_links (i3links);
2756    distribute_links (i2links);
2757    distribute_links (i1links);
2758
2759    if (GET_CODE (i2dest) == REG)
2760      {
2761	rtx link;
2762	rtx i2_insn = 0, i2_val = 0, set;
2763
2764	/* The insn that used to set this register doesn't exist, and
2765	   this life of the register may not exist either.  See if one of
2766	   I3's links points to an insn that sets I2DEST.  If it does,
2767	   that is now the last known value for I2DEST. If we don't update
2768	   this and I2 set the register to a value that depended on its old
2769	   contents, we will get confused.  If this insn is used, thing
2770	   will be set correctly in combine_instructions.  */
2771
2772	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2773	  if ((set = single_set (XEXP (link, 0))) != 0
2774	      && rtx_equal_p (i2dest, SET_DEST (set)))
2775	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2776
2777	record_value_for_reg (i2dest, i2_insn, i2_val);
2778
2779	/* If the reg formerly set in I2 died only once and that was in I3,
2780	   zero its use count so it won't make `reload' do any work.  */
2781	if (! added_sets_2
2782	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2783	    && ! i2dest_in_i2src)
2784	  {
2785	    regno = REGNO (i2dest);
2786	    REG_N_SETS (regno)--;
2787	  }
2788      }
2789
2790    if (i1 && GET_CODE (i1dest) == REG)
2791      {
2792	rtx link;
2793	rtx i1_insn = 0, i1_val = 0, set;
2794
2795	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2796	  if ((set = single_set (XEXP (link, 0))) != 0
2797	      && rtx_equal_p (i1dest, SET_DEST (set)))
2798	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2799
2800	record_value_for_reg (i1dest, i1_insn, i1_val);
2801
2802	regno = REGNO (i1dest);
2803	if (! added_sets_1 && ! i1dest_in_i1src)
2804	  REG_N_SETS (regno)--;
2805      }
2806
2807    /* Update reg_nonzero_bits et al for any changes that may have been made
2808       to this insn.  The order of set_nonzero_bits_and_sign_copies() is
2809       important.  Because newi2pat can affect nonzero_bits of newpat */
2810    if (newi2pat)
2811      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2812    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2813
2814    /* Set new_direct_jump_p if a new return or simple jump instruction
2815       has been created.
2816
2817       If I3 is now an unconditional jump, ensure that it has a
2818       BARRIER following it since it may have initially been a
2819       conditional jump.  It may also be the last nonnote insn.  */
2820
2821    if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2822      {
2823	*new_direct_jump_p = 1;
2824
2825	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2826	    || GET_CODE (temp) != BARRIER)
2827	  emit_barrier_after (i3);
2828      }
2829    /* An NOOP jump does not need barrier, but it does need cleaning up
2830       of CFG.  */
2831    if (GET_CODE (newpat) == SET
2832	&& SET_SRC (newpat) == pc_rtx
2833	&& SET_DEST (newpat) == pc_rtx)
2834      *new_direct_jump_p = 1;
2835  }
2836
2837  combine_successes++;
2838  undo_commit ();
2839
2840  /* Clear this here, so that subsequent get_last_value calls are not
2841     affected.  */
2842  subst_prev_insn = NULL_RTX;
2843
2844  if (added_links_insn
2845      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2846      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2847    return added_links_insn;
2848  else
2849    return newi2pat ? i2 : i3;
2850}
2851
2852/* Undo all the modifications recorded in undobuf.  */
2853
2854static void
2855undo_all ()
2856{
2857  struct undo *undo, *next;
2858
2859  for (undo = undobuf.undos; undo; undo = next)
2860    {
2861      next = undo->next;
2862      if (undo->is_int)
2863	*undo->where.i = undo->old_contents.i;
2864      else
2865	*undo->where.r = undo->old_contents.r;
2866
2867      undo->next = undobuf.frees;
2868      undobuf.frees = undo;
2869    }
2870
2871  undobuf.undos = 0;
2872
2873  /* Clear this here, so that subsequent get_last_value calls are not
2874     affected.  */
2875  subst_prev_insn = NULL_RTX;
2876}
2877
2878/* We've committed to accepting the changes we made.  Move all
2879   of the undos to the free list.  */
2880
2881static void
2882undo_commit ()
2883{
2884  struct undo *undo, *next;
2885
2886  for (undo = undobuf.undos; undo; undo = next)
2887    {
2888      next = undo->next;
2889      undo->next = undobuf.frees;
2890      undobuf.frees = undo;
2891    }
2892  undobuf.undos = 0;
2893}
2894
2895
2896/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2897   where we have an arithmetic expression and return that point.  LOC will
2898   be inside INSN.
2899
2900   try_combine will call this function to see if an insn can be split into
2901   two insns.  */
2902
2903static rtx *
2904find_split_point (loc, insn)
2905     rtx *loc;
2906     rtx insn;
2907{
2908  rtx x = *loc;
2909  enum rtx_code code = GET_CODE (x);
2910  rtx *split;
2911  unsigned HOST_WIDE_INT len = 0;
2912  HOST_WIDE_INT pos = 0;
2913  int unsignedp = 0;
2914  rtx inner = NULL_RTX;
2915
2916  /* First special-case some codes.  */
2917  switch (code)
2918    {
2919    case SUBREG:
2920#ifdef INSN_SCHEDULING
2921      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2922	 point.  */
2923      if (GET_CODE (SUBREG_REG (x)) == MEM)
2924	return loc;
2925#endif
2926      return find_split_point (&SUBREG_REG (x), insn);
2927
2928    case MEM:
2929#ifdef HAVE_lo_sum
2930      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2931	 using LO_SUM and HIGH.  */
2932      if (GET_CODE (XEXP (x, 0)) == CONST
2933	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2934	{
2935	  SUBST (XEXP (x, 0),
2936		 gen_rtx_LO_SUM (Pmode,
2937				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2938				 XEXP (x, 0)));
2939	  return &XEXP (XEXP (x, 0), 0);
2940	}
2941#endif
2942
2943      /* If we have a PLUS whose second operand is a constant and the
2944	 address is not valid, perhaps will can split it up using
2945	 the machine-specific way to split large constants.  We use
2946	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2947	 it will not remain in the result.  */
2948      if (GET_CODE (XEXP (x, 0)) == PLUS
2949	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2950	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2951	{
2952	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2953	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2954				 subst_insn);
2955
2956	  /* This should have produced two insns, each of which sets our
2957	     placeholder.  If the source of the second is a valid address,
2958	     we can make put both sources together and make a split point
2959	     in the middle.  */
2960
2961	  if (seq && XVECLEN (seq, 0) == 2
2962	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2963	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2964	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2965	      && ! reg_mentioned_p (reg,
2966				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2967	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2968	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2969	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2970	      && memory_address_p (GET_MODE (x),
2971				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2972	    {
2973	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2974	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2975
2976	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2977		 find where in SRC2 it was placed, that can become our
2978		 split point and we can replace this address with SRC2.
2979		 Just try two obvious places.  */
2980
2981	      src2 = replace_rtx (src2, reg, src1);
2982	      split = 0;
2983	      if (XEXP (src2, 0) == src1)
2984		split = &XEXP (src2, 0);
2985	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2986		       && XEXP (XEXP (src2, 0), 0) == src1)
2987		split = &XEXP (XEXP (src2, 0), 0);
2988
2989	      if (split)
2990		{
2991		  SUBST (XEXP (x, 0), src2);
2992		  return split;
2993		}
2994	    }
2995
2996	  /* If that didn't work, perhaps the first operand is complex and
2997	     needs to be computed separately, so make a split point there.
2998	     This will occur on machines that just support REG + CONST
2999	     and have a constant moved through some previous computation.  */
3000
3001	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
3002		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3003			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
3004			     == 'o')))
3005	    return &XEXP (XEXP (x, 0), 0);
3006	}
3007      break;
3008
3009    case SET:
3010#ifdef HAVE_cc0
3011      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3012	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3013	 we need to put the operand into a register.  So split at that
3014	 point.  */
3015
3016      if (SET_DEST (x) == cc0_rtx
3017	  && GET_CODE (SET_SRC (x)) != COMPARE
3018	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3019	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
3020	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
3021		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
3022	return &SET_SRC (x);
3023#endif
3024
3025      /* See if we can split SET_SRC as it stands.  */
3026      split = find_split_point (&SET_SRC (x), insn);
3027      if (split && split != &SET_SRC (x))
3028	return split;
3029
3030      /* See if we can split SET_DEST as it stands.  */
3031      split = find_split_point (&SET_DEST (x), insn);
3032      if (split && split != &SET_DEST (x))
3033	return split;
3034
3035      /* See if this is a bitfield assignment with everything constant.  If
3036	 so, this is an IOR of an AND, so split it into that.  */
3037      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3038	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3039	      <= HOST_BITS_PER_WIDE_INT)
3040	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3041	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3042	  && GET_CODE (SET_SRC (x)) == CONST_INT
3043	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3044	       + INTVAL (XEXP (SET_DEST (x), 2)))
3045	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3046	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3047	{
3048	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3049	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3050	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3051	  rtx dest = XEXP (SET_DEST (x), 0);
3052	  enum machine_mode mode = GET_MODE (dest);
3053	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3054
3055	  if (BITS_BIG_ENDIAN)
3056	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3057
3058	  if (src == mask)
3059	    SUBST (SET_SRC (x),
3060		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3061	  else
3062	    SUBST (SET_SRC (x),
3063		   gen_binary (IOR, mode,
3064			       gen_binary (AND, mode, dest,
3065					   GEN_INT
3066					   (
3067					    trunc_int_for_mode
3068					    (~(mask << pos)
3069					     & GET_MODE_MASK (mode), mode))),
3070			       GEN_INT (src << pos)));
3071
3072	  SUBST (SET_DEST (x), dest);
3073
3074	  split = find_split_point (&SET_SRC (x), insn);
3075	  if (split && split != &SET_SRC (x))
3076	    return split;
3077	}
3078
3079      /* Otherwise, see if this is an operation that we can split into two.
3080	 If so, try to split that.  */
3081      code = GET_CODE (SET_SRC (x));
3082
3083      switch (code)
3084	{
3085	case AND:
3086	  /* If we are AND'ing with a large constant that is only a single
3087	     bit and the result is only being used in a context where we
3088	     need to know if it is zero or non-zero, replace it with a bit
3089	     extraction.  This will avoid the large constant, which might
3090	     have taken more than one insn to make.  If the constant were
3091	     not a valid argument to the AND but took only one insn to make,
3092	     this is no worse, but if it took more than one insn, it will
3093	     be better.  */
3094
3095	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3096	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3097	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3098	      && GET_CODE (SET_DEST (x)) == REG
3099	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3100	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3101	      && XEXP (*split, 0) == SET_DEST (x)
3102	      && XEXP (*split, 1) == const0_rtx)
3103	    {
3104	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3105						XEXP (SET_SRC (x), 0),
3106						pos, NULL_RTX, 1, 1, 0, 0);
3107	      if (extraction != 0)
3108		{
3109		  SUBST (SET_SRC (x), extraction);
3110		  return find_split_point (loc, insn);
3111		}
3112	    }
3113	  break;
3114
3115	case NE:
3116	  /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3117	     is known to be on, this can be converted into a NEG of a shift.  */
3118	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3119	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3120	      && 1 <= (pos = exact_log2
3121		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3122				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3123	    {
3124	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3125
3126	      SUBST (SET_SRC (x),
3127		     gen_rtx_NEG (mode,
3128				  gen_rtx_LSHIFTRT (mode,
3129						    XEXP (SET_SRC (x), 0),
3130						    GEN_INT (pos))));
3131
3132	      split = find_split_point (&SET_SRC (x), insn);
3133	      if (split && split != &SET_SRC (x))
3134		return split;
3135	    }
3136	  break;
3137
3138	case SIGN_EXTEND:
3139	  inner = XEXP (SET_SRC (x), 0);
3140
3141	  /* We can't optimize if either mode is a partial integer
3142	     mode as we don't know how many bits are significant
3143	     in those modes.  */
3144	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3145	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3146	    break;
3147
3148	  pos = 0;
3149	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3150	  unsignedp = 0;
3151	  break;
3152
3153	case SIGN_EXTRACT:
3154	case ZERO_EXTRACT:
3155	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3156	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3157	    {
3158	      inner = XEXP (SET_SRC (x), 0);
3159	      len = INTVAL (XEXP (SET_SRC (x), 1));
3160	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3161
3162	      if (BITS_BIG_ENDIAN)
3163		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3164	      unsignedp = (code == ZERO_EXTRACT);
3165	    }
3166	  break;
3167
3168	default:
3169	  break;
3170	}
3171
3172      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3173	{
3174	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3175
3176	  /* For unsigned, we have a choice of a shift followed by an
3177	     AND or two shifts.  Use two shifts for field sizes where the
3178	     constant might be too large.  We assume here that we can
3179	     always at least get 8-bit constants in an AND insn, which is
3180	     true for every current RISC.  */
3181
3182	  if (unsignedp && len <= 8)
3183	    {
3184	      SUBST (SET_SRC (x),
3185		     gen_rtx_AND (mode,
3186				  gen_rtx_LSHIFTRT
3187				  (mode, gen_lowpart_for_combine (mode, inner),
3188				   GEN_INT (pos)),
3189				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3190
3191	      split = find_split_point (&SET_SRC (x), insn);
3192	      if (split && split != &SET_SRC (x))
3193		return split;
3194	    }
3195	  else
3196	    {
3197	      SUBST (SET_SRC (x),
3198		     gen_rtx_fmt_ee
3199		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3200		      gen_rtx_ASHIFT (mode,
3201				      gen_lowpart_for_combine (mode, inner),
3202				      GEN_INT (GET_MODE_BITSIZE (mode)
3203					       - len - pos)),
3204		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3205
3206	      split = find_split_point (&SET_SRC (x), insn);
3207	      if (split && split != &SET_SRC (x))
3208		return split;
3209	    }
3210	}
3211
3212      /* See if this is a simple operation with a constant as the second
3213	 operand.  It might be that this constant is out of range and hence
3214	 could be used as a split point.  */
3215      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3216	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3217	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3218	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3219	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3220	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3221		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3222		      == 'o'))))
3223	return &XEXP (SET_SRC (x), 1);
3224
3225      /* Finally, see if this is a simple operation with its first operand
3226	 not in a register.  The operation might require this operand in a
3227	 register, so return it as a split point.  We can always do this
3228	 because if the first operand were another operation, we would have
3229	 already found it as a split point.  */
3230      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3231	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3232	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3233	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3234	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3235	return &XEXP (SET_SRC (x), 0);
3236
3237      return 0;
3238
3239    case AND:
3240    case IOR:
3241      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3242	 it is better to write this as (not (ior A B)) so we can split it.
3243	 Similarly for IOR.  */
3244      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3245	{
3246	  SUBST (*loc,
3247		 gen_rtx_NOT (GET_MODE (x),
3248			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3249					      GET_MODE (x),
3250					      XEXP (XEXP (x, 0), 0),
3251					      XEXP (XEXP (x, 1), 0))));
3252	  return find_split_point (loc, insn);
3253	}
3254
3255      /* Many RISC machines have a large set of logical insns.  If the
3256	 second operand is a NOT, put it first so we will try to split the
3257	 other operand first.  */
3258      if (GET_CODE (XEXP (x, 1)) == NOT)
3259	{
3260	  rtx tem = XEXP (x, 0);
3261	  SUBST (XEXP (x, 0), XEXP (x, 1));
3262	  SUBST (XEXP (x, 1), tem);
3263	}
3264      break;
3265
3266    default:
3267      break;
3268    }
3269
3270  /* Otherwise, select our actions depending on our rtx class.  */
3271  switch (GET_RTX_CLASS (code))
3272    {
3273    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3274    case '3':
3275      split = find_split_point (&XEXP (x, 2), insn);
3276      if (split)
3277	return split;
3278      /* ... fall through ...  */
3279    case '2':
3280    case 'c':
3281    case '<':
3282      split = find_split_point (&XEXP (x, 1), insn);
3283      if (split)
3284	return split;
3285      /* ... fall through ...  */
3286    case '1':
3287      /* Some machines have (and (shift ...) ...) insns.  If X is not
3288	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3289      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3290	return &XEXP (x, 0);
3291
3292      split = find_split_point (&XEXP (x, 0), insn);
3293      if (split)
3294	return split;
3295      return loc;
3296    }
3297
3298  /* Otherwise, we don't have a split point.  */
3299  return 0;
3300}
3301
3302/* Throughout X, replace FROM with TO, and return the result.
3303   The result is TO if X is FROM;
3304   otherwise the result is X, but its contents may have been modified.
3305   If they were modified, a record was made in undobuf so that
3306   undo_all will (among other things) return X to its original state.
3307
3308   If the number of changes necessary is too much to record to undo,
3309   the excess changes are not made, so the result is invalid.
3310   The changes already made can still be undone.
3311   undobuf.num_undo is incremented for such changes, so by testing that
3312   the caller can tell whether the result is valid.
3313
3314   `n_occurrences' is incremented each time FROM is replaced.
3315
3316   IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3317
3318   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
3319   by copying if `n_occurrences' is non-zero.  */
3320
3321static rtx
3322subst (x, from, to, in_dest, unique_copy)
3323     rtx x, from, to;
3324     int in_dest;
3325     int unique_copy;
3326{
3327  enum rtx_code code = GET_CODE (x);
3328  enum machine_mode op0_mode = VOIDmode;
3329  const char *fmt;
3330  int len, i;
3331  rtx new;
3332
3333/* Two expressions are equal if they are identical copies of a shared
3334   RTX or if they are both registers with the same register number
3335   and mode.  */
3336
3337#define COMBINE_RTX_EQUAL_P(X,Y)			\
3338  ((X) == (Y)						\
3339   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3340       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3341
3342  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3343    {
3344      n_occurrences++;
3345      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3346    }
3347
3348  /* If X and FROM are the same register but different modes, they will
3349     not have been seen as equal above.  However, flow.c will make a
3350     LOG_LINKS entry for that case.  If we do nothing, we will try to
3351     rerecognize our original insn and, when it succeeds, we will
3352     delete the feeding insn, which is incorrect.
3353
3354     So force this insn not to match in this (rare) case.  */
3355  if (! in_dest && code == REG && GET_CODE (from) == REG
3356      && REGNO (x) == REGNO (from))
3357    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3358
3359  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3360     of which may contain things that can be combined.  */
3361  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3362    return x;
3363
3364  /* It is possible to have a subexpression appear twice in the insn.
3365     Suppose that FROM is a register that appears within TO.
3366     Then, after that subexpression has been scanned once by `subst',
3367     the second time it is scanned, TO may be found.  If we were
3368     to scan TO here, we would find FROM within it and create a
3369     self-referent rtl structure which is completely wrong.  */
3370  if (COMBINE_RTX_EQUAL_P (x, to))
3371    return to;
3372
3373  /* Parallel asm_operands need special attention because all of the
3374     inputs are shared across the arms.  Furthermore, unsharing the
3375     rtl results in recognition failures.  Failure to handle this case
3376     specially can result in circular rtl.
3377
3378     Solve this by doing a normal pass across the first entry of the
3379     parallel, and only processing the SET_DESTs of the subsequent
3380     entries.  Ug.  */
3381
3382  if (code == PARALLEL
3383      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3384      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3385    {
3386      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3387
3388      /* If this substitution failed, this whole thing fails.  */
3389      if (GET_CODE (new) == CLOBBER
3390	  && XEXP (new, 0) == const0_rtx)
3391	return new;
3392
3393      SUBST (XVECEXP (x, 0, 0), new);
3394
3395      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3396	{
3397	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3398
3399	  if (GET_CODE (dest) != REG
3400	      && GET_CODE (dest) != CC0
3401	      && GET_CODE (dest) != PC)
3402	    {
3403	      new = subst (dest, from, to, 0, unique_copy);
3404
3405	      /* If this substitution failed, this whole thing fails.  */
3406	      if (GET_CODE (new) == CLOBBER
3407		  && XEXP (new, 0) == const0_rtx)
3408		return new;
3409
3410	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3411	    }
3412	}
3413    }
3414  else
3415    {
3416      len = GET_RTX_LENGTH (code);
3417      fmt = GET_RTX_FORMAT (code);
3418
3419      /* We don't need to process a SET_DEST that is a register, CC0,
3420	 or PC, so set up to skip this common case.  All other cases
3421	 where we want to suppress replacing something inside a
3422	 SET_SRC are handled via the IN_DEST operand.  */
3423      if (code == SET
3424	  && (GET_CODE (SET_DEST (x)) == REG
3425	      || GET_CODE (SET_DEST (x)) == CC0
3426	      || GET_CODE (SET_DEST (x)) == PC))
3427	fmt = "ie";
3428
3429      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3430	 constant.  */
3431      if (fmt[0] == 'e')
3432	op0_mode = GET_MODE (XEXP (x, 0));
3433
3434      for (i = 0; i < len; i++)
3435	{
3436	  if (fmt[i] == 'E')
3437	    {
3438	      int j;
3439	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3440		{
3441		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3442		    {
3443		      new = (unique_copy && n_occurrences
3444			     ? copy_rtx (to) : to);
3445		      n_occurrences++;
3446		    }
3447		  else
3448		    {
3449		      new = subst (XVECEXP (x, i, j), from, to, 0,
3450				   unique_copy);
3451
3452		      /* If this substitution failed, this whole thing
3453			 fails.  */
3454		      if (GET_CODE (new) == CLOBBER
3455			  && XEXP (new, 0) == const0_rtx)
3456			return new;
3457		    }
3458
3459		  SUBST (XVECEXP (x, i, j), new);
3460		}
3461	    }
3462	  else if (fmt[i] == 'e')
3463	    {
3464	      /* If this is a register being set, ignore it.  */
3465	      new = XEXP (x, i);
3466	      if (in_dest
3467		  && (code == SUBREG || code == STRICT_LOW_PART
3468		      || code == ZERO_EXTRACT)
3469		  && i == 0
3470		  && GET_CODE (new) == REG)
3471		;
3472
3473	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3474		{
3475		  /* In general, don't install a subreg involving two
3476		     modes not tieable.  It can worsen register
3477		     allocation, and can even make invalid reload
3478		     insns, since the reg inside may need to be copied
3479		     from in the outside mode, and that may be invalid
3480		     if it is an fp reg copied in integer mode.
3481
3482		     We allow two exceptions to this: It is valid if
3483		     it is inside another SUBREG and the mode of that
3484		     SUBREG and the mode of the inside of TO is
3485		     tieable and it is valid if X is a SET that copies
3486		     FROM to CC0.  */
3487
3488		  if (GET_CODE (to) == SUBREG
3489		      && ! MODES_TIEABLE_P (GET_MODE (to),
3490					    GET_MODE (SUBREG_REG (to)))
3491		      && ! (code == SUBREG
3492			    && MODES_TIEABLE_P (GET_MODE (x),
3493						GET_MODE (SUBREG_REG (to))))
3494#ifdef HAVE_cc0
3495		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3496#endif
3497		      )
3498		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3499
3500#ifdef CLASS_CANNOT_CHANGE_MODE
3501		  if (code == SUBREG
3502		      && GET_CODE (to) == REG
3503		      && REGNO (to) < FIRST_PSEUDO_REGISTER
3504		      && (TEST_HARD_REG_BIT
3505			  (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
3506			   REGNO (to)))
3507		      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3508						     GET_MODE (x)))
3509		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3510#endif
3511
3512		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3513		  n_occurrences++;
3514		}
3515	      else
3516		/* If we are in a SET_DEST, suppress most cases unless we
3517		   have gone inside a MEM, in which case we want to
3518		   simplify the address.  We assume here that things that
3519		   are actually part of the destination have their inner
3520		   parts in the first expression.  This is true for SUBREG,
3521		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3522		   things aside from REG and MEM that should appear in a
3523		   SET_DEST.  */
3524		new = subst (XEXP (x, i), from, to,
3525			     (((in_dest
3526				&& (code == SUBREG || code == STRICT_LOW_PART
3527				    || code == ZERO_EXTRACT))
3528			       || code == SET)
3529			      && i == 0), unique_copy);
3530
3531	      /* If we found that we will have to reject this combination,
3532		 indicate that by returning the CLOBBER ourselves, rather than
3533		 an expression containing it.  This will speed things up as
3534		 well as prevent accidents where two CLOBBERs are considered
3535		 to be equal, thus producing an incorrect simplification.  */
3536
3537	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3538		return new;
3539
3540	      if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG)
3541		{
3542		  enum machine_mode mode = GET_MODE (x);
3543		  x = simplify_subreg (mode, new,
3544				       GET_MODE (SUBREG_REG (x)),
3545				       SUBREG_BYTE (x));
3546		  if (! x)
3547		    x = gen_rtx_CLOBBER (mode, const0_rtx);
3548		}
3549	      else if (GET_CODE (new) == CONST_INT
3550		       && GET_CODE (x) == ZERO_EXTEND)
3551		{
3552		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3553						new, GET_MODE (XEXP (x, 0)));
3554		  if (! x)
3555		    abort ();
3556		}
3557	      else
3558		SUBST (XEXP (x, i), new);
3559	    }
3560	}
3561    }
3562
3563  /* Try to simplify X.  If the simplification changed the code, it is likely
3564     that further simplification will help, so loop, but limit the number
3565     of repetitions that will be performed.  */
3566
3567  for (i = 0; i < 4; i++)
3568    {
3569      /* If X is sufficiently simple, don't bother trying to do anything
3570	 with it.  */
3571      if (code != CONST_INT && code != REG && code != CLOBBER)
3572	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3573
3574      if (GET_CODE (x) == code)
3575	break;
3576
3577      code = GET_CODE (x);
3578
3579      /* We no longer know the original mode of operand 0 since we
3580	 have changed the form of X)  */
3581      op0_mode = VOIDmode;
3582    }
3583
3584  return x;
3585}
3586
3587/* Simplify X, a piece of RTL.  We just operate on the expression at the
3588   outer level; call `subst' to simplify recursively.  Return the new
3589   expression.
3590
3591   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3592   will be the iteration even if an expression with a code different from
3593   X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3594
3595static rtx
3596combine_simplify_rtx (x, op0_mode, last, in_dest)
3597     rtx x;
3598     enum machine_mode op0_mode;
3599     int last;
3600     int in_dest;
3601{
3602  enum rtx_code code = GET_CODE (x);
3603  enum machine_mode mode = GET_MODE (x);
3604  rtx temp;
3605  rtx reversed;
3606  int i;
3607
3608  /* If this is a commutative operation, put a constant last and a complex
3609     expression first.  We don't need to do this for comparisons here.  */
3610  if (GET_RTX_CLASS (code) == 'c'
3611      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3612    {
3613      temp = XEXP (x, 0);
3614      SUBST (XEXP (x, 0), XEXP (x, 1));
3615      SUBST (XEXP (x, 1), temp);
3616    }
3617
3618  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3619     sign extension of a PLUS with a constant, reverse the order of the sign
3620     extension and the addition. Note that this not the same as the original
3621     code, but overflow is undefined for signed values.  Also note that the
3622     PLUS will have been partially moved "inside" the sign-extension, so that
3623     the first operand of X will really look like:
3624         (ashiftrt (plus (ashift A C4) C5) C4).
3625     We convert this to
3626         (plus (ashiftrt (ashift A C4) C2) C4)
3627     and replace the first operand of X with that expression.  Later parts
3628     of this function may simplify the expression further.
3629
3630     For example, if we start with (mult (sign_extend (plus A C1)) C2),
3631     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
3632     distributive law to produce (plus (mult (sign_extend X) C1) C3).
3633
3634     We do this to simplify address expressions.  */
3635
3636  if ((code == PLUS || code == MINUS || code == MULT)
3637      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3638      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3639      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3640      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3641      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3642      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3643      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3644      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3645					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3646					    XEXP (XEXP (x, 0), 1))) != 0)
3647    {
3648      rtx new
3649	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3650				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3651				INTVAL (XEXP (XEXP (x, 0), 1)));
3652
3653      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3654				  INTVAL (XEXP (XEXP (x, 0), 1)));
3655
3656      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3657    }
3658
3659  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3660     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3661     things.  Check for cases where both arms are testing the same
3662     condition.
3663
3664     Don't do anything if all operands are very simple.  */
3665
3666  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3667	|| GET_RTX_CLASS (code) == '<')
3668       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3669	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3670		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3671		      == 'o')))
3672	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3673	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3674		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3675			 == 'o')))))
3676      || (GET_RTX_CLASS (code) == '1'
3677	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3678	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3679		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3680			 == 'o'))))))
3681    {
3682      rtx cond, true_rtx, false_rtx;
3683
3684      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3685      if (cond != 0
3686	  /* If everything is a comparison, what we have is highly unlikely
3687	     to be simpler, so don't use it.  */
3688	  && ! (GET_RTX_CLASS (code) == '<'
3689		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3690		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3691	{
3692	  rtx cop1 = const0_rtx;
3693	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3694
3695	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3696	    return x;
3697
3698	  /* Simplify the alternative arms; this may collapse the true and
3699	     false arms to store-flag values.  */
3700	  true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3701	  false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
3702
3703	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
3704	     is unlikely to be simpler.  */
3705	  if (general_operand (true_rtx, VOIDmode)
3706	      && general_operand (false_rtx, VOIDmode))
3707	    {
3708	      /* Restarting if we generate a store-flag expression will cause
3709		 us to loop.  Just drop through in this case.  */
3710
3711	      /* If the result values are STORE_FLAG_VALUE and zero, we can
3712		 just make the comparison operation.  */
3713	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3714		x = gen_binary (cond_code, mode, cond, cop1);
3715	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3716		       && reverse_condition (cond_code) != UNKNOWN)
3717		x = gen_binary (reverse_condition (cond_code),
3718				mode, cond, cop1);
3719
3720	      /* Likewise, we can make the negate of a comparison operation
3721		 if the result values are - STORE_FLAG_VALUE and zero.  */
3722	      else if (GET_CODE (true_rtx) == CONST_INT
3723		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3724		       && false_rtx == const0_rtx)
3725		x = simplify_gen_unary (NEG, mode,
3726					gen_binary (cond_code, mode, cond,
3727						    cop1),
3728					mode);
3729	      else if (GET_CODE (false_rtx) == CONST_INT
3730		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3731		       && true_rtx == const0_rtx)
3732		x = simplify_gen_unary (NEG, mode,
3733					gen_binary (reverse_condition
3734						    (cond_code),
3735						    mode, cond, cop1),
3736					mode);
3737	      else
3738		return gen_rtx_IF_THEN_ELSE (mode,
3739					     gen_binary (cond_code, VOIDmode,
3740							 cond, cop1),
3741					     true_rtx, false_rtx);
3742
3743	      code = GET_CODE (x);
3744	      op0_mode = VOIDmode;
3745	    }
3746	}
3747    }
3748
3749  /* Try to fold this expression in case we have constants that weren't
3750     present before.  */
3751  temp = 0;
3752  switch (GET_RTX_CLASS (code))
3753    {
3754    case '1':
3755      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3756      break;
3757    case '<':
3758      {
3759	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3760	if (cmp_mode == VOIDmode)
3761	  {
3762	    cmp_mode = GET_MODE (XEXP (x, 1));
3763	    if (cmp_mode == VOIDmode)
3764	      cmp_mode = op0_mode;
3765	  }
3766	temp = simplify_relational_operation (code, cmp_mode,
3767					      XEXP (x, 0), XEXP (x, 1));
3768      }
3769#ifdef FLOAT_STORE_FLAG_VALUE
3770      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3771	{
3772	  if (temp == const0_rtx)
3773	    temp = CONST0_RTX (mode);
3774	  else
3775	    temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
3776	}
3777#endif
3778      break;
3779    case 'c':
3780    case '2':
3781      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3782      break;
3783    case 'b':
3784    case '3':
3785      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3786					 XEXP (x, 1), XEXP (x, 2));
3787      break;
3788    }
3789
3790  if (temp)
3791    {
3792      x = temp;
3793      code = GET_CODE (temp);
3794      op0_mode = VOIDmode;
3795      mode = GET_MODE (temp);
3796    }
3797
3798  /* First see if we can apply the inverse distributive law.  */
3799  if (code == PLUS || code == MINUS
3800      || code == AND || code == IOR || code == XOR)
3801    {
3802      x = apply_distributive_law (x);
3803      code = GET_CODE (x);
3804      op0_mode = VOIDmode;
3805    }
3806
3807  /* If CODE is an associative operation not otherwise handled, see if we
3808     can associate some operands.  This can win if they are constants or
3809     if they are logically related (i.e. (a & b) & a).  */
3810  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3811       || code == AND || code == IOR || code == XOR
3812       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3813      && ((INTEGRAL_MODE_P (mode) && code != DIV)
3814	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3815    {
3816      if (GET_CODE (XEXP (x, 0)) == code)
3817	{
3818	  rtx other = XEXP (XEXP (x, 0), 0);
3819	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3820	  rtx inner_op1 = XEXP (x, 1);
3821	  rtx inner;
3822
3823	  /* Make sure we pass the constant operand if any as the second
3824	     one if this is a commutative operation.  */
3825	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3826	    {
3827	      rtx tem = inner_op0;
3828	      inner_op0 = inner_op1;
3829	      inner_op1 = tem;
3830	    }
3831	  inner = simplify_binary_operation (code == MINUS ? PLUS
3832					     : code == DIV ? MULT
3833					     : code,
3834					     mode, inner_op0, inner_op1);
3835
3836	  /* For commutative operations, try the other pair if that one
3837	     didn't simplify.  */
3838	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3839	    {
3840	      other = XEXP (XEXP (x, 0), 1);
3841	      inner = simplify_binary_operation (code, mode,
3842						 XEXP (XEXP (x, 0), 0),
3843						 XEXP (x, 1));
3844	    }
3845
3846	  if (inner)
3847	    return gen_binary (code, mode, other, inner);
3848	}
3849    }
3850
3851  /* A little bit of algebraic simplification here.  */
3852  switch (code)
3853    {
3854    case MEM:
3855      /* Ensure that our address has any ASHIFTs converted to MULT in case
3856	 address-recognizing predicates are called later.  */
3857      temp = make_compound_operation (XEXP (x, 0), MEM);
3858      SUBST (XEXP (x, 0), temp);
3859      break;
3860
3861    case SUBREG:
3862      if (op0_mode == VOIDmode)
3863	op0_mode = GET_MODE (SUBREG_REG (x));
3864
3865      /* simplify_subreg can't use gen_lowpart_for_combine.  */
3866      if (CONSTANT_P (SUBREG_REG (x))
3867	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
3868	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3869
3870      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3871        break;
3872      {
3873	rtx temp;
3874	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3875				SUBREG_BYTE (x));
3876	if (temp)
3877	  return temp;
3878      }
3879
3880      /* Don't change the mode of the MEM if that would change the meaning
3881	 of the address.  */
3882      if (GET_CODE (SUBREG_REG (x)) == MEM
3883	  && (MEM_VOLATILE_P (SUBREG_REG (x))
3884	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3885	return gen_rtx_CLOBBER (mode, const0_rtx);
3886
3887      /* Note that we cannot do any narrowing for non-constants since
3888	 we might have been counting on using the fact that some bits were
3889	 zero.  We now do this in the SET.  */
3890
3891      break;
3892
3893    case NOT:
3894      /* (not (plus X -1)) can become (neg X).  */
3895      if (GET_CODE (XEXP (x, 0)) == PLUS
3896	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3897	return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
3898
3899      /* Similarly, (not (neg X)) is (plus X -1).  */
3900      if (GET_CODE (XEXP (x, 0)) == NEG)
3901	return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3902
3903      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
3904      if (GET_CODE (XEXP (x, 0)) == XOR
3905	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3906	  && (temp = simplify_unary_operation (NOT, mode,
3907					       XEXP (XEXP (x, 0), 1),
3908					       mode)) != 0)
3909	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3910
3911      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3912	 other than 1, but that is not valid.  We could do a similar
3913	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3914	 but this doesn't seem common enough to bother with.  */
3915      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3916	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3917	return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3918							 const1_rtx, mode),
3919			       XEXP (XEXP (x, 0), 1));
3920
3921      if (GET_CODE (XEXP (x, 0)) == SUBREG
3922	  && subreg_lowpart_p (XEXP (x, 0))
3923	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3924	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3925	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3926	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3927	{
3928	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3929
3930	  x = gen_rtx_ROTATE (inner_mode,
3931			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
3932						  inner_mode),
3933			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3934	  return gen_lowpart_for_combine (mode, x);
3935	}
3936
3937      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3938	 reversing the comparison code if valid.  */
3939      if (STORE_FLAG_VALUE == -1
3940	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3941	  && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
3942					      XEXP (XEXP (x, 0), 1))))
3943	return reversed;
3944
3945      /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
3946	 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3947	 perform the above simplification.  */
3948
3949      if (STORE_FLAG_VALUE == -1
3950	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3951	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3952	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3953	return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3954
3955      /* Apply De Morgan's laws to reduce number of patterns for machines
3956	 with negating logical insns (and-not, nand, etc.).  If result has
3957	 only one NOT, put it first, since that is how the patterns are
3958	 coded.  */
3959
3960      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3961	{
3962	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3963	  enum machine_mode op_mode;
3964
3965	  op_mode = GET_MODE (in1);
3966	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
3967
3968	  op_mode = GET_MODE (in2);
3969	  if (op_mode == VOIDmode)
3970	    op_mode = mode;
3971	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
3972
3973	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
3974	    {
3975	      rtx tem = in2;
3976	      in2 = in1; in1 = tem;
3977	    }
3978
3979	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3980				 mode, in1, in2);
3981	}
3982      break;
3983
3984    case NEG:
3985      /* (neg (plus X 1)) can become (not X).  */
3986      if (GET_CODE (XEXP (x, 0)) == PLUS
3987	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
3988	return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
3989
3990      /* Similarly, (neg (not X)) is (plus X 1).  */
3991      if (GET_CODE (XEXP (x, 0)) == NOT)
3992	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3993
3994      /* (neg (minus X Y)) can become (minus Y X).  */
3995      if (GET_CODE (XEXP (x, 0)) == MINUS
3996	  && (! FLOAT_MODE_P (mode)
3997	      /* x-y != -(y-x) with IEEE floating point.  */
3998	      || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3999	      || flag_unsafe_math_optimizations))
4000	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
4001			   XEXP (XEXP (x, 0), 0));
4002
4003      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
4004      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
4005	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4006	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
4007
4008      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
4009	 if we can then eliminate the NEG (e.g.,
4010	 if the operand is a constant).  */
4011
4012      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4013	{
4014	  temp = simplify_unary_operation (NEG, mode,
4015					   XEXP (XEXP (x, 0), 0), mode);
4016	  if (temp)
4017	    return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
4018	}
4019
4020      temp = expand_compound_operation (XEXP (x, 0));
4021
4022      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4023	 replaced by (lshiftrt X C).  This will convert
4024	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
4025
4026      if (GET_CODE (temp) == ASHIFTRT
4027	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
4028	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4029	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4030				     INTVAL (XEXP (temp, 1)));
4031
4032      /* If X has only a single bit that might be nonzero, say, bit I, convert
4033	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4034	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
4035	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
4036	 or a SUBREG of one since we'd be making the expression more
4037	 complex if it was just a register.  */
4038
4039      if (GET_CODE (temp) != REG
4040	  && ! (GET_CODE (temp) == SUBREG
4041		&& GET_CODE (SUBREG_REG (temp)) == REG)
4042	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4043	{
4044	  rtx temp1 = simplify_shift_const
4045	    (NULL_RTX, ASHIFTRT, mode,
4046	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4047				   GET_MODE_BITSIZE (mode) - 1 - i),
4048	     GET_MODE_BITSIZE (mode) - 1 - i);
4049
4050	  /* If all we did was surround TEMP with the two shifts, we
4051	     haven't improved anything, so don't use it.  Otherwise,
4052	     we are better off with TEMP1.  */
4053	  if (GET_CODE (temp1) != ASHIFTRT
4054	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4055	      || XEXP (XEXP (temp1, 0), 0) != temp)
4056	    return temp1;
4057	}
4058      break;
4059
4060    case TRUNCATE:
4061      /* We can't handle truncation to a partial integer mode here
4062	 because we don't know the real bitsize of the partial
4063	 integer mode.  */
4064      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4065	break;
4066
4067      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4068	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4069				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4070	SUBST (XEXP (x, 0),
4071	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4072			      GET_MODE_MASK (mode), NULL_RTX, 0));
4073
4074      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
4075      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4076	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4077	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4078	return XEXP (XEXP (x, 0), 0);
4079
4080      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4081	 (OP:SI foo:SI) if OP is NEG or ABS.  */
4082      if ((GET_CODE (XEXP (x, 0)) == ABS
4083	   || GET_CODE (XEXP (x, 0)) == NEG)
4084	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4085	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4086	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4087	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4088				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4089
4090      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4091	 (truncate:SI x).  */
4092      if (GET_CODE (XEXP (x, 0)) == SUBREG
4093	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4094	  && subreg_lowpart_p (XEXP (x, 0)))
4095	return SUBREG_REG (XEXP (x, 0));
4096
4097      /* If we know that the value is already truncated, we can
4098         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4099         is nonzero for the corresponding modes.  But don't do this
4100         for an (LSHIFTRT (MULT ...)) since this will cause problems
4101         with the umulXi3_highpart patterns.  */
4102      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4103				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4104	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4105	     >= GET_MODE_BITSIZE (mode) + 1
4106	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4107		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4108	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4109
4110      /* A truncate of a comparison can be replaced with a subreg if
4111         STORE_FLAG_VALUE permits.  This is like the previous test,
4112         but it works even if the comparison is done in a mode larger
4113         than HOST_BITS_PER_WIDE_INT.  */
4114      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4115	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4116	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4117	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4118
4119      /* Similarly, a truncate of a register whose value is a
4120         comparison can be replaced with a subreg if STORE_FLAG_VALUE
4121         permits.  */
4122      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4123	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4124	  && (temp = get_last_value (XEXP (x, 0)))
4125	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4126	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4127
4128      break;
4129
4130    case FLOAT_TRUNCATE:
4131      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
4132      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4133	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4134	return XEXP (XEXP (x, 0), 0);
4135
4136      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4137	 (OP:SF foo:SF) if OP is NEG or ABS.  */
4138      if ((GET_CODE (XEXP (x, 0)) == ABS
4139	   || GET_CODE (XEXP (x, 0)) == NEG)
4140	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4141	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4142	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4143				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4144
4145      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4146	 is (float_truncate:SF x).  */
4147      if (GET_CODE (XEXP (x, 0)) == SUBREG
4148	  && subreg_lowpart_p (XEXP (x, 0))
4149	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4150	return SUBREG_REG (XEXP (x, 0));
4151      break;
4152
4153#ifdef HAVE_cc0
4154    case COMPARE:
4155      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4156	 using cc0, in which case we want to leave it as a COMPARE
4157	 so we can distinguish it from a register-register-copy.  */
4158      if (XEXP (x, 1) == const0_rtx)
4159	return XEXP (x, 0);
4160
4161      /* In IEEE floating point, x-0 is not the same as x.  */
4162      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4163	   || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
4164	   || flag_unsafe_math_optimizations)
4165	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4166	return XEXP (x, 0);
4167      break;
4168#endif
4169
4170    case CONST:
4171      /* (const (const X)) can become (const X).  Do it this way rather than
4172	 returning the inner CONST since CONST can be shared with a
4173	 REG_EQUAL note.  */
4174      if (GET_CODE (XEXP (x, 0)) == CONST)
4175	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4176      break;
4177
4178#ifdef HAVE_lo_sum
4179    case LO_SUM:
4180      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
4181	 can add in an offset.  find_split_point will split this address up
4182	 again if it doesn't match.  */
4183      if (GET_CODE (XEXP (x, 0)) == HIGH
4184	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4185	return XEXP (x, 1);
4186      break;
4187#endif
4188
4189    case PLUS:
4190      /* If we have (plus (plus (A const) B)), associate it so that CONST is
4191	 outermost.  That's because that's the way indexed addresses are
4192	 supposed to appear.  This code used to check many more cases, but
4193	 they are now checked elsewhere.  */
4194      if (GET_CODE (XEXP (x, 0)) == PLUS
4195	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4196	return gen_binary (PLUS, mode,
4197			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4198				       XEXP (x, 1)),
4199			   XEXP (XEXP (x, 0), 1));
4200
4201      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4202	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4203	 bit-field and can be replaced by either a sign_extend or a
4204	 sign_extract.  The `and' may be a zero_extend and the two
4205	 <c>, -<c> constants may be reversed.  */
4206      if (GET_CODE (XEXP (x, 0)) == XOR
4207	  && GET_CODE (XEXP (x, 1)) == CONST_INT
4208	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4209	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4210	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4211	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4212	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4213	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4214	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4215	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4216		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4217	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4218		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4219		      == (unsigned int) i + 1))))
4220	return simplify_shift_const
4221	  (NULL_RTX, ASHIFTRT, mode,
4222	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4223				 XEXP (XEXP (XEXP (x, 0), 0), 0),
4224				 GET_MODE_BITSIZE (mode) - (i + 1)),
4225	   GET_MODE_BITSIZE (mode) - (i + 1));
4226
4227      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4228	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4229	 is 1.  This produces better code than the alternative immediately
4230	 below.  */
4231      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4232	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4233	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4234	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
4235					      XEXP (XEXP (x, 0), 0),
4236					      XEXP (XEXP (x, 0), 1))))
4237	return
4238	  simplify_gen_unary (NEG, mode, reversed, mode);
4239
4240      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4241	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4242	 the bitsize of the mode - 1.  This allows simplification of
4243	 "a = (b & 8) == 0;"  */
4244      if (XEXP (x, 1) == constm1_rtx
4245	  && GET_CODE (XEXP (x, 0)) != REG
4246	  && ! (GET_CODE (XEXP (x,0)) == SUBREG
4247		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4248	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4249	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4250	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4251				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4252				 GET_MODE_BITSIZE (mode) - 1),
4253	   GET_MODE_BITSIZE (mode) - 1);
4254
4255      /* If we are adding two things that have no bits in common, convert
4256	 the addition into an IOR.  This will often be further simplified,
4257	 for example in cases like ((a & 1) + (a & 2)), which can
4258	 become a & 3.  */
4259
4260      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4261	  && (nonzero_bits (XEXP (x, 0), mode)
4262	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4263	{
4264	  /* Try to simplify the expression further.  */
4265	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4266	  temp = combine_simplify_rtx (tor, mode, last, in_dest);
4267
4268	  /* If we could, great.  If not, do not go ahead with the IOR
4269	     replacement, since PLUS appears in many special purpose
4270	     address arithmetic instructions.  */
4271	  if (GET_CODE (temp) != CLOBBER && temp != tor)
4272	    return temp;
4273	}
4274      break;
4275
4276    case MINUS:
4277      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4278	 by reversing the comparison code if valid.  */
4279      if (STORE_FLAG_VALUE == 1
4280	  && XEXP (x, 0) == const1_rtx
4281	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4282	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
4283					      XEXP (XEXP (x, 1), 0),
4284					      XEXP (XEXP (x, 1), 1))))
4285	return reversed;
4286
4287      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4288	 (and <foo> (const_int pow2-1))  */
4289      if (GET_CODE (XEXP (x, 1)) == AND
4290	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4291	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4292	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4293	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4294				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4295
4296      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4297	 integers.  */
4298      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4299	return gen_binary (MINUS, mode,
4300			   gen_binary (MINUS, mode, XEXP (x, 0),
4301				       XEXP (XEXP (x, 1), 0)),
4302			   XEXP (XEXP (x, 1), 1));
4303      break;
4304
4305    case MULT:
4306      /* If we have (mult (plus A B) C), apply the distributive law and then
4307	 the inverse distributive law to see if things simplify.  This
4308	 occurs mostly in addresses, often when unrolling loops.  */
4309
4310      if (GET_CODE (XEXP (x, 0)) == PLUS)
4311	{
4312	  x = apply_distributive_law
4313	    (gen_binary (PLUS, mode,
4314			 gen_binary (MULT, mode,
4315				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4316			 gen_binary (MULT, mode,
4317				     XEXP (XEXP (x, 0), 1),
4318				     copy_rtx (XEXP (x, 1)))));
4319
4320	  if (GET_CODE (x) != MULT)
4321	    return x;
4322	}
4323      /* Try simplify a*(b/c) as (a*b)/c.  */
4324      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4325	  && GET_CODE (XEXP (x, 0)) == DIV)
4326	{
4327	  rtx tem = simplify_binary_operation (MULT, mode,
4328					       XEXP (XEXP (x, 0), 0),
4329					       XEXP (x, 1));
4330	  if (tem)
4331	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4332	}
4333      break;
4334
4335    case UDIV:
4336      /* If this is a divide by a power of two, treat it as a shift if
4337	 its first operand is a shift.  */
4338      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4339	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4340	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4341	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4342	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4343	      || GET_CODE (XEXP (x, 0)) == ROTATE
4344	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4345	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4346      break;
4347
4348    case EQ:  case NE:
4349    case GT:  case GTU:  case GE:  case GEU:
4350    case LT:  case LTU:  case LE:  case LEU:
4351    case UNEQ:  case LTGT:
4352    case UNGT:  case UNGE:
4353    case UNLT:  case UNLE:
4354    case UNORDERED: case ORDERED:
4355      /* If the first operand is a condition code, we can't do anything
4356	 with it.  */
4357      if (GET_CODE (XEXP (x, 0)) == COMPARE
4358	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4359#ifdef HAVE_cc0
4360	      && XEXP (x, 0) != cc0_rtx
4361#endif
4362	      ))
4363	{
4364	  rtx op0 = XEXP (x, 0);
4365	  rtx op1 = XEXP (x, 1);
4366	  enum rtx_code new_code;
4367
4368	  if (GET_CODE (op0) == COMPARE)
4369	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4370
4371	  /* Simplify our comparison, if possible.  */
4372	  new_code = simplify_comparison (code, &op0, &op1);
4373
4374	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4375	     if only the low-order bit is possibly nonzero in X (such as when
4376	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4377	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4378	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4379	     (plus X 1).
4380
4381	     Remove any ZERO_EXTRACT we made when thinking this was a
4382	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4383	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4384	     the call to make_compound_operation in the SET case.  */
4385
4386	  if (STORE_FLAG_VALUE == 1
4387	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4388	      && op1 == const0_rtx
4389	      && mode == GET_MODE (op0)
4390	      && nonzero_bits (op0, mode) == 1)
4391	    return gen_lowpart_for_combine (mode,
4392					    expand_compound_operation (op0));
4393
4394	  else if (STORE_FLAG_VALUE == 1
4395		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4396		   && op1 == const0_rtx
4397		   && mode == GET_MODE (op0)
4398		   && (num_sign_bit_copies (op0, mode)
4399		       == GET_MODE_BITSIZE (mode)))
4400	    {
4401	      op0 = expand_compound_operation (op0);
4402	      return simplify_gen_unary (NEG, mode,
4403					 gen_lowpart_for_combine (mode, op0),
4404					 mode);
4405	    }
4406
4407	  else if (STORE_FLAG_VALUE == 1
4408		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4409		   && op1 == const0_rtx
4410		   && mode == GET_MODE (op0)
4411		   && nonzero_bits (op0, mode) == 1)
4412	    {
4413	      op0 = expand_compound_operation (op0);
4414	      return gen_binary (XOR, mode,
4415				 gen_lowpart_for_combine (mode, op0),
4416				 const1_rtx);
4417	    }
4418
4419	  else if (STORE_FLAG_VALUE == 1
4420		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4421		   && op1 == const0_rtx
4422		   && mode == GET_MODE (op0)
4423		   && (num_sign_bit_copies (op0, mode)
4424		       == GET_MODE_BITSIZE (mode)))
4425	    {
4426	      op0 = expand_compound_operation (op0);
4427	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4428	    }
4429
4430	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4431	     those above.  */
4432	  if (STORE_FLAG_VALUE == -1
4433	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4434	      && op1 == const0_rtx
4435	      && (num_sign_bit_copies (op0, mode)
4436		  == GET_MODE_BITSIZE (mode)))
4437	    return gen_lowpart_for_combine (mode,
4438					    expand_compound_operation (op0));
4439
4440	  else if (STORE_FLAG_VALUE == -1
4441		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4442		   && op1 == const0_rtx
4443		   && mode == GET_MODE (op0)
4444		   && nonzero_bits (op0, mode) == 1)
4445	    {
4446	      op0 = expand_compound_operation (op0);
4447	      return simplify_gen_unary (NEG, mode,
4448					 gen_lowpart_for_combine (mode, op0),
4449					 mode);
4450	    }
4451
4452	  else if (STORE_FLAG_VALUE == -1
4453		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4454		   && op1 == const0_rtx
4455		   && mode == GET_MODE (op0)
4456		   && (num_sign_bit_copies (op0, mode)
4457		       == GET_MODE_BITSIZE (mode)))
4458	    {
4459	      op0 = expand_compound_operation (op0);
4460	      return simplify_gen_unary (NOT, mode,
4461					 gen_lowpart_for_combine (mode, op0),
4462					 mode);
4463	    }
4464
4465	  /* If X is 0/1, (eq X 0) is X-1.  */
4466	  else if (STORE_FLAG_VALUE == -1
4467		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4468		   && op1 == const0_rtx
4469		   && mode == GET_MODE (op0)
4470		   && nonzero_bits (op0, mode) == 1)
4471	    {
4472	      op0 = expand_compound_operation (op0);
4473	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4474	    }
4475
4476	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4477	     one bit that might be nonzero, we can convert (ne x 0) to
4478	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4479	     AND with STORE_FLAG_VALUE when we are done, since we are only
4480	     going to test the sign bit.  */
4481	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4482	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4483	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4484		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4485	      && op1 == const0_rtx
4486	      && mode == GET_MODE (op0)
4487	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4488	    {
4489	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4490					expand_compound_operation (op0),
4491					GET_MODE_BITSIZE (mode) - 1 - i);
4492	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4493		return XEXP (x, 0);
4494	      else
4495		return x;
4496	    }
4497
4498	  /* If the code changed, return a whole new comparison.  */
4499	  if (new_code != code)
4500	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4501
4502	  /* Otherwise, keep this operation, but maybe change its operands.
4503	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4504	  SUBST (XEXP (x, 0), op0);
4505	  SUBST (XEXP (x, 1), op1);
4506	}
4507      break;
4508
4509    case IF_THEN_ELSE:
4510      return simplify_if_then_else (x);
4511
4512    case ZERO_EXTRACT:
4513    case SIGN_EXTRACT:
4514    case ZERO_EXTEND:
4515    case SIGN_EXTEND:
4516      /* If we are processing SET_DEST, we are done.  */
4517      if (in_dest)
4518	return x;
4519
4520      return expand_compound_operation (x);
4521
4522    case SET:
4523      return simplify_set (x);
4524
4525    case AND:
4526    case IOR:
4527    case XOR:
4528      return simplify_logical (x, last);
4529
4530    case ABS:
4531      /* (abs (neg <foo>)) -> (abs <foo>) */
4532      if (GET_CODE (XEXP (x, 0)) == NEG)
4533	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4534
4535      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4536         do nothing.  */
4537      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4538	break;
4539
4540      /* If operand is something known to be positive, ignore the ABS.  */
4541      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4542	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4543	       <= HOST_BITS_PER_WIDE_INT)
4544	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4545		   & ((HOST_WIDE_INT) 1
4546		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4547		  == 0)))
4548	return XEXP (x, 0);
4549
4550      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4551      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4552	return gen_rtx_NEG (mode, XEXP (x, 0));
4553
4554      break;
4555
4556    case FFS:
4557      /* (ffs (*_extend <X>)) = (ffs <X>) */
4558      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4559	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4560	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4561      break;
4562
4563    case FLOAT:
4564      /* (float (sign_extend <X>)) = (float <X>).  */
4565      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4566	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4567      break;
4568
4569    case ASHIFT:
4570    case LSHIFTRT:
4571    case ASHIFTRT:
4572    case ROTATE:
4573    case ROTATERT:
4574      /* If this is a shift by a constant amount, simplify it.  */
4575      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4576	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4577				     INTVAL (XEXP (x, 1)));
4578
4579#ifdef SHIFT_COUNT_TRUNCATED
4580      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4581	SUBST (XEXP (x, 1),
4582	       force_to_mode (XEXP (x, 1), GET_MODE (x),
4583			      ((HOST_WIDE_INT) 1
4584			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4585			      - 1,
4586			      NULL_RTX, 0));
4587#endif
4588
4589      break;
4590
4591    case VEC_SELECT:
4592      {
4593	rtx op0 = XEXP (x, 0);
4594	rtx op1 = XEXP (x, 1);
4595	int len;
4596
4597	if (GET_CODE (op1) != PARALLEL)
4598	  abort ();
4599	len = XVECLEN (op1, 0);
4600	if (len == 1
4601	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4602	    && GET_CODE (op0) == VEC_CONCAT)
4603	  {
4604	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4605
4606	    /* Try to find the element in the VEC_CONCAT.  */
4607	    for (;;)
4608	      {
4609		if (GET_MODE (op0) == GET_MODE (x))
4610		  return op0;
4611		if (GET_CODE (op0) == VEC_CONCAT)
4612		  {
4613		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4614		    if (op0_size < offset)
4615		      op0 = XEXP (op0, 0);
4616		    else
4617		      {
4618			offset -= op0_size;
4619			op0 = XEXP (op0, 1);
4620		      }
4621		  }
4622		else
4623		  break;
4624	      }
4625	  }
4626      }
4627
4628      break;
4629
4630    default:
4631      break;
4632    }
4633
4634  return x;
4635}
4636
4637/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4638
4639static rtx
4640simplify_if_then_else (x)
4641     rtx x;
4642{
4643  enum machine_mode mode = GET_MODE (x);
4644  rtx cond = XEXP (x, 0);
4645  rtx true_rtx = XEXP (x, 1);
4646  rtx false_rtx = XEXP (x, 2);
4647  enum rtx_code true_code = GET_CODE (cond);
4648  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4649  rtx temp;
4650  int i;
4651  enum rtx_code false_code;
4652  rtx reversed;
4653
4654  /* Simplify storing of the truth value.  */
4655  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4656    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4657
4658  /* Also when the truth value has to be reversed.  */
4659  if (comparison_p
4660      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4661      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4662					  XEXP (cond, 1))))
4663    return reversed;
4664
4665  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4666     in it is being compared against certain values.  Get the true and false
4667     comparisons and see if that says anything about the value of each arm.  */
4668
4669  if (comparison_p
4670      && ((false_code = combine_reversed_comparison_code (cond))
4671	  != UNKNOWN)
4672      && GET_CODE (XEXP (cond, 0)) == REG)
4673    {
4674      HOST_WIDE_INT nzb;
4675      rtx from = XEXP (cond, 0);
4676      rtx true_val = XEXP (cond, 1);
4677      rtx false_val = true_val;
4678      int swapped = 0;
4679
4680      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4681
4682      if (false_code == EQ)
4683	{
4684	  swapped = 1, true_code = EQ, false_code = NE;
4685	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4686	}
4687
4688      /* If we are comparing against zero and the expression being tested has
4689	 only a single bit that might be nonzero, that is its value when it is
4690	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4691
4692      if (true_code == EQ && true_val == const0_rtx
4693	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4694	false_code = EQ, false_val = GEN_INT (nzb);
4695      else if (true_code == EQ && true_val == const0_rtx
4696	       && (num_sign_bit_copies (from, GET_MODE (from))
4697		   == GET_MODE_BITSIZE (GET_MODE (from))))
4698	false_code = EQ, false_val = constm1_rtx;
4699
4700      /* Now simplify an arm if we know the value of the register in the
4701	 branch and it is used in the arm.  Be careful due to the potential
4702	 of locally-shared RTL.  */
4703
4704      if (reg_mentioned_p (from, true_rtx))
4705	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4706				      from, true_val),
4707		      pc_rtx, pc_rtx, 0, 0);
4708      if (reg_mentioned_p (from, false_rtx))
4709	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4710				   from, false_val),
4711		       pc_rtx, pc_rtx, 0, 0);
4712
4713      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4714      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4715
4716      true_rtx = XEXP (x, 1);
4717      false_rtx = XEXP (x, 2);
4718      true_code = GET_CODE (cond);
4719    }
4720
4721  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4722     reversed, do so to avoid needing two sets of patterns for
4723     subtract-and-branch insns.  Similarly if we have a constant in the true
4724     arm, the false arm is the same as the first operand of the comparison, or
4725     the false arm is more complicated than the true arm.  */
4726
4727  if (comparison_p
4728      && combine_reversed_comparison_code (cond) != UNKNOWN
4729      && (true_rtx == pc_rtx
4730	  || (CONSTANT_P (true_rtx)
4731	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4732	  || true_rtx == const0_rtx
4733	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4734	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4735	  || (GET_CODE (true_rtx) == SUBREG
4736	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4737	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4738	  || reg_mentioned_p (true_rtx, false_rtx)
4739	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4740    {
4741      true_code = reversed_comparison_code (cond, NULL);
4742      SUBST (XEXP (x, 0),
4743	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4744				  XEXP (cond, 1)));
4745
4746      SUBST (XEXP (x, 1), false_rtx);
4747      SUBST (XEXP (x, 2), true_rtx);
4748
4749      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4750      cond = XEXP (x, 0);
4751
4752      /* It is possible that the conditional has been simplified out.  */
4753      true_code = GET_CODE (cond);
4754      comparison_p = GET_RTX_CLASS (true_code) == '<';
4755    }
4756
4757  /* If the two arms are identical, we don't need the comparison.  */
4758
4759  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4760    return true_rtx;
4761
4762  /* Convert a == b ? b : a to "a".  */
4763  if (true_code == EQ && ! side_effects_p (cond)
4764      && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4765      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4766      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4767    return false_rtx;
4768  else if (true_code == NE && ! side_effects_p (cond)
4769	   && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4770	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4771	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4772    return true_rtx;
4773
4774  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4775
4776  if (GET_MODE_CLASS (mode) == MODE_INT
4777      && GET_CODE (false_rtx) == NEG
4778      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4779      && comparison_p
4780      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4781      && ! side_effects_p (true_rtx))
4782    switch (true_code)
4783      {
4784      case GT:
4785      case GE:
4786	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4787      case LT:
4788      case LE:
4789	return
4790	  simplify_gen_unary (NEG, mode,
4791			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4792			      mode);
4793      default:
4794	break;
4795      }
4796
4797  /* Look for MIN or MAX.  */
4798
4799  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4800      && comparison_p
4801      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4802      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4803      && ! side_effects_p (cond))
4804    switch (true_code)
4805      {
4806      case GE:
4807      case GT:
4808	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4809      case LE:
4810      case LT:
4811	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4812      case GEU:
4813      case GTU:
4814	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4815      case LEU:
4816      case LTU:
4817	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4818      default:
4819	break;
4820      }
4821
4822  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4823     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4824     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4825     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4826     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4827     neither 1 or -1, but it isn't worth checking for.  */
4828
4829  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4830      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4831    {
4832      rtx t = make_compound_operation (true_rtx, SET);
4833      rtx f = make_compound_operation (false_rtx, SET);
4834      rtx cond_op0 = XEXP (cond, 0);
4835      rtx cond_op1 = XEXP (cond, 1);
4836      enum rtx_code op = NIL, extend_op = NIL;
4837      enum machine_mode m = mode;
4838      rtx z = 0, c1 = NULL_RTX;
4839
4840      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4841	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4842	   || GET_CODE (t) == ASHIFT
4843	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4844	  && rtx_equal_p (XEXP (t, 0), f))
4845	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4846
4847      /* If an identity-zero op is commutative, check whether there
4848	 would be a match if we swapped the operands.  */
4849      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4850		|| GET_CODE (t) == XOR)
4851	       && rtx_equal_p (XEXP (t, 1), f))
4852	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4853      else if (GET_CODE (t) == SIGN_EXTEND
4854	       && (GET_CODE (XEXP (t, 0)) == PLUS
4855		   || GET_CODE (XEXP (t, 0)) == MINUS
4856		   || GET_CODE (XEXP (t, 0)) == IOR
4857		   || GET_CODE (XEXP (t, 0)) == XOR
4858		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4859		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4860		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4861	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4862	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4863	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4864	       && (num_sign_bit_copies (f, GET_MODE (f))
4865		   > (GET_MODE_BITSIZE (mode)
4866		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4867	{
4868	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4869	  extend_op = SIGN_EXTEND;
4870	  m = GET_MODE (XEXP (t, 0));
4871	}
4872      else if (GET_CODE (t) == SIGN_EXTEND
4873	       && (GET_CODE (XEXP (t, 0)) == PLUS
4874		   || GET_CODE (XEXP (t, 0)) == IOR
4875		   || GET_CODE (XEXP (t, 0)) == XOR)
4876	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4877	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4878	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4879	       && (num_sign_bit_copies (f, GET_MODE (f))
4880		   > (GET_MODE_BITSIZE (mode)
4881		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4882	{
4883	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4884	  extend_op = SIGN_EXTEND;
4885	  m = GET_MODE (XEXP (t, 0));
4886	}
4887      else if (GET_CODE (t) == ZERO_EXTEND
4888	       && (GET_CODE (XEXP (t, 0)) == PLUS
4889		   || GET_CODE (XEXP (t, 0)) == MINUS
4890		   || GET_CODE (XEXP (t, 0)) == IOR
4891		   || GET_CODE (XEXP (t, 0)) == XOR
4892		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4893		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4894		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4895	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4896	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4897	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4898	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4899	       && ((nonzero_bits (f, GET_MODE (f))
4900		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4901		   == 0))
4902	{
4903	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4904	  extend_op = ZERO_EXTEND;
4905	  m = GET_MODE (XEXP (t, 0));
4906	}
4907      else if (GET_CODE (t) == ZERO_EXTEND
4908	       && (GET_CODE (XEXP (t, 0)) == PLUS
4909		   || GET_CODE (XEXP (t, 0)) == IOR
4910		   || GET_CODE (XEXP (t, 0)) == XOR)
4911	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4912	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4913	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4914	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4915	       && ((nonzero_bits (f, GET_MODE (f))
4916		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4917		   == 0))
4918	{
4919	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4920	  extend_op = ZERO_EXTEND;
4921	  m = GET_MODE (XEXP (t, 0));
4922	}
4923
4924      if (z)
4925	{
4926	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4927			pc_rtx, pc_rtx, 0, 0);
4928	  temp = gen_binary (MULT, m, temp,
4929			     gen_binary (MULT, m, c1, const_true_rtx));
4930	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4931	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4932
4933	  if (extend_op != NIL)
4934	    temp = simplify_gen_unary (extend_op, mode, temp, m);
4935
4936	  return temp;
4937	}
4938    }
4939
4940  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4941     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4942     negation of a single bit, we can convert this operation to a shift.  We
4943     can actually do this more generally, but it doesn't seem worth it.  */
4944
4945  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4946      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
4947      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4948	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
4949	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4950	       == GET_MODE_BITSIZE (mode))
4951	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
4952    return
4953      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4954			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4955
4956  return x;
4957}
4958
4959/* Simplify X, a SET expression.  Return the new expression.  */
4960
4961static rtx
4962simplify_set (x)
4963     rtx x;
4964{
4965  rtx src = SET_SRC (x);
4966  rtx dest = SET_DEST (x);
4967  enum machine_mode mode
4968    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4969  rtx other_insn;
4970  rtx *cc_use;
4971
4972  /* (set (pc) (return)) gets written as (return).  */
4973  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4974    return src;
4975
4976  /* Now that we know for sure which bits of SRC we are using, see if we can
4977     simplify the expression for the object knowing that we only need the
4978     low-order bits.  */
4979
4980  if (GET_MODE_CLASS (mode) == MODE_INT)
4981    {
4982      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
4983      SUBST (SET_SRC (x), src);
4984    }
4985
4986  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4987     the comparison result and try to simplify it unless we already have used
4988     undobuf.other_insn.  */
4989  if ((GET_CODE (src) == COMPARE
4990#ifdef HAVE_cc0
4991       || dest == cc0_rtx
4992#endif
4993       )
4994      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4995      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4996      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4997      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4998    {
4999      enum rtx_code old_code = GET_CODE (*cc_use);
5000      enum rtx_code new_code;
5001      rtx op0, op1;
5002      int other_changed = 0;
5003      enum machine_mode compare_mode = GET_MODE (dest);
5004
5005      if (GET_CODE (src) == COMPARE)
5006	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5007      else
5008	op0 = src, op1 = const0_rtx;
5009
5010      /* Simplify our comparison, if possible.  */
5011      new_code = simplify_comparison (old_code, &op0, &op1);
5012
5013#ifdef EXTRA_CC_MODES
5014      /* If this machine has CC modes other than CCmode, check to see if we
5015	 need to use a different CC mode here.  */
5016      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5017#endif /* EXTRA_CC_MODES */
5018
5019#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5020      /* If the mode changed, we have to change SET_DEST, the mode in the
5021	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5022	 a hard register, just build new versions with the proper mode.  If it
5023	 is a pseudo, we lose unless it is only time we set the pseudo, in
5024	 which case we can safely change its mode.  */
5025      if (compare_mode != GET_MODE (dest))
5026	{
5027	  unsigned int regno = REGNO (dest);
5028	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
5029
5030	  if (regno < FIRST_PSEUDO_REGISTER
5031	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5032	    {
5033	      if (regno >= FIRST_PSEUDO_REGISTER)
5034		SUBST (regno_reg_rtx[regno], new_dest);
5035
5036	      SUBST (SET_DEST (x), new_dest);
5037	      SUBST (XEXP (*cc_use, 0), new_dest);
5038	      other_changed = 1;
5039
5040	      dest = new_dest;
5041	    }
5042	}
5043#endif
5044
5045      /* If the code changed, we have to build a new comparison in
5046	 undobuf.other_insn.  */
5047      if (new_code != old_code)
5048	{
5049	  unsigned HOST_WIDE_INT mask;
5050
5051	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5052					  dest, const0_rtx));
5053
5054	  /* If the only change we made was to change an EQ into an NE or
5055	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5056	     is zero, check if changing the user of the condition code will
5057	     produce a valid insn.  If it won't, we can keep the original code
5058	     in that insn by surrounding our operation with an XOR.  */
5059
5060	  if (((old_code == NE && new_code == EQ)
5061	       || (old_code == EQ && new_code == NE))
5062	      && ! other_changed && op1 == const0_rtx
5063	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5064	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5065	    {
5066	      rtx pat = PATTERN (other_insn), note = 0;
5067
5068	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5069		   && ! check_asm_operands (pat)))
5070		{
5071		  PUT_CODE (*cc_use, old_code);
5072		  other_insn = 0;
5073
5074		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5075		}
5076	    }
5077
5078	  other_changed = 1;
5079	}
5080
5081      if (other_changed)
5082	undobuf.other_insn = other_insn;
5083
5084#ifdef HAVE_cc0
5085      /* If we are now comparing against zero, change our source if
5086	 needed.  If we do not use cc0, we always have a COMPARE.  */
5087      if (op1 == const0_rtx && dest == cc0_rtx)
5088	{
5089	  SUBST (SET_SRC (x), op0);
5090	  src = op0;
5091	}
5092      else
5093#endif
5094
5095      /* Otherwise, if we didn't previously have a COMPARE in the
5096	 correct mode, we need one.  */
5097      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5098	{
5099	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5100	  src = SET_SRC (x);
5101	}
5102      else
5103	{
5104	  /* Otherwise, update the COMPARE if needed.  */
5105	  SUBST (XEXP (src, 0), op0);
5106	  SUBST (XEXP (src, 1), op1);
5107	}
5108    }
5109  else
5110    {
5111      /* Get SET_SRC in a form where we have placed back any
5112	 compound expressions.  Then do the checks below.  */
5113      src = make_compound_operation (src, SET);
5114      SUBST (SET_SRC (x), src);
5115    }
5116
5117  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5118     and X being a REG or (subreg (reg)), we may be able to convert this to
5119     (set (subreg:m2 x) (op)).
5120
5121     We can always do this if M1 is narrower than M2 because that means that
5122     we only care about the low bits of the result.
5123
5124     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5125     perform a narrower operation than requested since the high-order bits will
5126     be undefined.  On machine where it is defined, this transformation is safe
5127     as long as M1 and M2 have the same number of words.  */
5128
5129  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5130      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5131      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5132	   / UNITS_PER_WORD)
5133	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5134	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5135#ifndef WORD_REGISTER_OPERATIONS
5136      && (GET_MODE_SIZE (GET_MODE (src))
5137	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5138#endif
5139#ifdef CLASS_CANNOT_CHANGE_MODE
5140      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5141	    && (TEST_HARD_REG_BIT
5142		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
5143		 REGNO (dest)))
5144	    && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
5145					   GET_MODE (SUBREG_REG (src))))
5146#endif
5147      && (GET_CODE (dest) == REG
5148	  || (GET_CODE (dest) == SUBREG
5149	      && GET_CODE (SUBREG_REG (dest)) == REG)))
5150    {
5151      SUBST (SET_DEST (x),
5152	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5153				      dest));
5154      SUBST (SET_SRC (x), SUBREG_REG (src));
5155
5156      src = SET_SRC (x), dest = SET_DEST (x);
5157    }
5158
5159#ifdef LOAD_EXTEND_OP
5160  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5161     would require a paradoxical subreg.  Replace the subreg with a
5162     zero_extend to avoid the reload that would otherwise be required.  */
5163
5164  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5165      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5166      && SUBREG_BYTE (src) == 0
5167      && (GET_MODE_SIZE (GET_MODE (src))
5168	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5169      && GET_CODE (SUBREG_REG (src)) == MEM)
5170    {
5171      SUBST (SET_SRC (x),
5172	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5173		      GET_MODE (src), SUBREG_REG (src)));
5174
5175      src = SET_SRC (x);
5176    }
5177#endif
5178
5179  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5180     are comparing an item known to be 0 or -1 against 0, use a logical
5181     operation instead. Check for one of the arms being an IOR of the other
5182     arm with some value.  We compute three terms to be IOR'ed together.  In
5183     practice, at most two will be nonzero.  Then we do the IOR's.  */
5184
5185  if (GET_CODE (dest) != PC
5186      && GET_CODE (src) == IF_THEN_ELSE
5187      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5188      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5189      && XEXP (XEXP (src, 0), 1) == const0_rtx
5190      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5191#ifdef HAVE_conditional_move
5192      && ! can_conditionally_move_p (GET_MODE (src))
5193#endif
5194      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5195			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5196	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5197      && ! side_effects_p (src))
5198    {
5199      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5200		      ? XEXP (src, 1) : XEXP (src, 2));
5201      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5202		   ? XEXP (src, 2) : XEXP (src, 1));
5203      rtx term1 = const0_rtx, term2, term3;
5204
5205      if (GET_CODE (true_rtx) == IOR
5206	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5207	term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5208      else if (GET_CODE (true_rtx) == IOR
5209	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5210	term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5211      else if (GET_CODE (false_rtx) == IOR
5212	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5213	term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5214      else if (GET_CODE (false_rtx) == IOR
5215	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5216	term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5217
5218      term2 = gen_binary (AND, GET_MODE (src),
5219			  XEXP (XEXP (src, 0), 0), true_rtx);
5220      term3 = gen_binary (AND, GET_MODE (src),
5221			  simplify_gen_unary (NOT, GET_MODE (src),
5222					      XEXP (XEXP (src, 0), 0),
5223					      GET_MODE (src)),
5224			  false_rtx);
5225
5226      SUBST (SET_SRC (x),
5227	     gen_binary (IOR, GET_MODE (src),
5228			 gen_binary (IOR, GET_MODE (src), term1, term2),
5229			 term3));
5230
5231      src = SET_SRC (x);
5232    }
5233
5234  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5235     whole thing fail.  */
5236  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5237    return src;
5238  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5239    return dest;
5240  else
5241    /* Convert this into a field assignment operation, if possible.  */
5242    return make_field_assignment (x);
5243}
5244
5245/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
5246   result.  LAST is nonzero if this is the last retry.  */
5247
5248static rtx
5249simplify_logical (x, last)
5250     rtx x;
5251     int last;
5252{
5253  enum machine_mode mode = GET_MODE (x);
5254  rtx op0 = XEXP (x, 0);
5255  rtx op1 = XEXP (x, 1);
5256  rtx reversed;
5257
5258  switch (GET_CODE (x))
5259    {
5260    case AND:
5261      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5262	 insn (and may simplify more).  */
5263      if (GET_CODE (op0) == XOR
5264	  && rtx_equal_p (XEXP (op0, 0), op1)
5265	  && ! side_effects_p (op1))
5266	x = gen_binary (AND, mode,
5267			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5268			op1);
5269
5270      if (GET_CODE (op0) == XOR
5271	  && rtx_equal_p (XEXP (op0, 1), op1)
5272	  && ! side_effects_p (op1))
5273	x = gen_binary (AND, mode,
5274			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5275			op1);
5276
5277      /* Similarly for (~(A ^ B)) & A.  */
5278      if (GET_CODE (op0) == NOT
5279	  && GET_CODE (XEXP (op0, 0)) == XOR
5280	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5281	  && ! side_effects_p (op1))
5282	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5283
5284      if (GET_CODE (op0) == NOT
5285	  && GET_CODE (XEXP (op0, 0)) == XOR
5286	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5287	  && ! side_effects_p (op1))
5288	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5289
5290      /* We can call simplify_and_const_int only if we don't lose
5291	 any (sign) bits when converting INTVAL (op1) to
5292	 "unsigned HOST_WIDE_INT".  */
5293      if (GET_CODE (op1) == CONST_INT
5294	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5295	      || INTVAL (op1) > 0))
5296	{
5297	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5298
5299	  /* If we have (ior (and (X C1) C2)) and the next restart would be
5300	     the last, simplify this by making C1 as small as possible
5301	     and then exit.  */
5302	  if (last
5303	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5304	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
5305	      && GET_CODE (op1) == CONST_INT)
5306	    return gen_binary (IOR, mode,
5307			       gen_binary (AND, mode, XEXP (op0, 0),
5308					   GEN_INT (INTVAL (XEXP (op0, 1))
5309						    & ~INTVAL (op1))), op1);
5310
5311	  if (GET_CODE (x) != AND)
5312	    return x;
5313
5314	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5315	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
5316	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5317	}
5318
5319      /* Convert (A | B) & A to A.  */
5320      if (GET_CODE (op0) == IOR
5321	  && (rtx_equal_p (XEXP (op0, 0), op1)
5322	      || rtx_equal_p (XEXP (op0, 1), op1))
5323	  && ! side_effects_p (XEXP (op0, 0))
5324	  && ! side_effects_p (XEXP (op0, 1)))
5325	return op1;
5326
5327      /* In the following group of tests (and those in case IOR below),
5328	 we start with some combination of logical operations and apply
5329	 the distributive law followed by the inverse distributive law.
5330	 Most of the time, this results in no change.  However, if some of
5331	 the operands are the same or inverses of each other, simplifications
5332	 will result.
5333
5334	 For example, (and (ior A B) (not B)) can occur as the result of
5335	 expanding a bit field assignment.  When we apply the distributive
5336	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
5337	 which then simplifies to (and (A (not B))).
5338
5339	 If we have (and (ior A B) C), apply the distributive law and then
5340	 the inverse distributive law to see if things simplify.  */
5341
5342      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5343	{
5344	  x = apply_distributive_law
5345	    (gen_binary (GET_CODE (op0), mode,
5346			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5347			 gen_binary (AND, mode, XEXP (op0, 1),
5348				     copy_rtx (op1))));
5349	  if (GET_CODE (x) != AND)
5350	    return x;
5351	}
5352
5353      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5354	return apply_distributive_law
5355	  (gen_binary (GET_CODE (op1), mode,
5356		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5357		       gen_binary (AND, mode, XEXP (op1, 1),
5358				   copy_rtx (op0))));
5359
5360      /* Similarly, taking advantage of the fact that
5361	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
5362
5363      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5364	return apply_distributive_law
5365	  (gen_binary (XOR, mode,
5366		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5367		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5368				   XEXP (op1, 1))));
5369
5370      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5371	return apply_distributive_law
5372	  (gen_binary (XOR, mode,
5373		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5374		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5375      break;
5376
5377    case IOR:
5378      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5379      if (GET_CODE (op1) == CONST_INT
5380	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5381	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5382	return op1;
5383
5384      /* Convert (A & B) | A to A.  */
5385      if (GET_CODE (op0) == AND
5386	  && (rtx_equal_p (XEXP (op0, 0), op1)
5387	      || rtx_equal_p (XEXP (op0, 1), op1))
5388	  && ! side_effects_p (XEXP (op0, 0))
5389	  && ! side_effects_p (XEXP (op0, 1)))
5390	return op1;
5391
5392      /* If we have (ior (and A B) C), apply the distributive law and then
5393	 the inverse distributive law to see if things simplify.  */
5394
5395      if (GET_CODE (op0) == AND)
5396	{
5397	  x = apply_distributive_law
5398	    (gen_binary (AND, mode,
5399			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5400			 gen_binary (IOR, mode, XEXP (op0, 1),
5401				     copy_rtx (op1))));
5402
5403	  if (GET_CODE (x) != IOR)
5404	    return x;
5405	}
5406
5407      if (GET_CODE (op1) == AND)
5408	{
5409	  x = apply_distributive_law
5410	    (gen_binary (AND, mode,
5411			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5412			 gen_binary (IOR, mode, XEXP (op1, 1),
5413				     copy_rtx (op0))));
5414
5415	  if (GET_CODE (x) != IOR)
5416	    return x;
5417	}
5418
5419      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5420	 mode size to (rotate A CX).  */
5421
5422      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5423	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5424	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5425	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5426	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5427	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5428	      == GET_MODE_BITSIZE (mode)))
5429	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5430			       (GET_CODE (op0) == ASHIFT
5431				? XEXP (op0, 1) : XEXP (op1, 1)));
5432
5433      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5434	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5435	 does not affect any of the bits in OP1, it can really be done
5436	 as a PLUS and we can associate.  We do this by seeing if OP1
5437	 can be safely shifted left C bits.  */
5438      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5439	  && GET_CODE (XEXP (op0, 0)) == PLUS
5440	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5441	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5442	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5443	{
5444	  int count = INTVAL (XEXP (op0, 1));
5445	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5446
5447	  if (mask >> count == INTVAL (op1)
5448	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5449	    {
5450	      SUBST (XEXP (XEXP (op0, 0), 1),
5451		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5452	      return op0;
5453	    }
5454	}
5455      break;
5456
5457    case XOR:
5458      /* If we are XORing two things that have no bits in common,
5459	 convert them into an IOR.  This helps to detect rotation encoded
5460	 using those methods and possibly other simplifications.  */
5461
5462      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5463	  && (nonzero_bits (op0, mode)
5464	      & nonzero_bits (op1, mode)) == 0)
5465	return (gen_binary (IOR, mode, op0, op1));
5466
5467      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5468	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5469	 (NOT y).  */
5470      {
5471	int num_negated = 0;
5472
5473	if (GET_CODE (op0) == NOT)
5474	  num_negated++, op0 = XEXP (op0, 0);
5475	if (GET_CODE (op1) == NOT)
5476	  num_negated++, op1 = XEXP (op1, 0);
5477
5478	if (num_negated == 2)
5479	  {
5480	    SUBST (XEXP (x, 0), op0);
5481	    SUBST (XEXP (x, 1), op1);
5482	  }
5483	else if (num_negated == 1)
5484	  return
5485	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5486				mode);
5487      }
5488
5489      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5490	 correspond to a machine insn or result in further simplifications
5491	 if B is a constant.  */
5492
5493      if (GET_CODE (op0) == AND
5494	  && rtx_equal_p (XEXP (op0, 1), op1)
5495	  && ! side_effects_p (op1))
5496	return gen_binary (AND, mode,
5497			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5498			   op1);
5499
5500      else if (GET_CODE (op0) == AND
5501	       && rtx_equal_p (XEXP (op0, 0), op1)
5502	       && ! side_effects_p (op1))
5503	return gen_binary (AND, mode,
5504			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5505			   op1);
5506
5507      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5508	 comparison if STORE_FLAG_VALUE is 1.  */
5509      if (STORE_FLAG_VALUE == 1
5510	  && op1 == const1_rtx
5511	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5512	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5513					      XEXP (op0, 1))))
5514	return reversed;
5515
5516      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5517	 is (lt foo (const_int 0)), so we can perform the above
5518	 simplification if STORE_FLAG_VALUE is 1.  */
5519
5520      if (STORE_FLAG_VALUE == 1
5521	  && op1 == const1_rtx
5522	  && GET_CODE (op0) == LSHIFTRT
5523	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5524	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5525	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5526
5527      /* (xor (comparison foo bar) (const_int sign-bit))
5528	 when STORE_FLAG_VALUE is the sign bit.  */
5529      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5530	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5531	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5532	  && op1 == const_true_rtx
5533	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5534	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5535					      XEXP (op0, 1))))
5536	return reversed;
5537
5538      break;
5539
5540    default:
5541      abort ();
5542    }
5543
5544  return x;
5545}
5546
5547/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5548   operations" because they can be replaced with two more basic operations.
5549   ZERO_EXTEND is also considered "compound" because it can be replaced with
5550   an AND operation, which is simpler, though only one operation.
5551
5552   The function expand_compound_operation is called with an rtx expression
5553   and will convert it to the appropriate shifts and AND operations,
5554   simplifying at each stage.
5555
5556   The function make_compound_operation is called to convert an expression
5557   consisting of shifts and ANDs into the equivalent compound expression.
5558   It is the inverse of this function, loosely speaking.  */
5559
5560static rtx
5561expand_compound_operation (x)
5562     rtx x;
5563{
5564  unsigned HOST_WIDE_INT pos = 0, len;
5565  int unsignedp = 0;
5566  unsigned int modewidth;
5567  rtx tem;
5568
5569  switch (GET_CODE (x))
5570    {
5571    case ZERO_EXTEND:
5572      unsignedp = 1;
5573    case SIGN_EXTEND:
5574      /* We can't necessarily use a const_int for a multiword mode;
5575	 it depends on implicitly extending the value.
5576	 Since we don't know the right way to extend it,
5577	 we can't tell whether the implicit way is right.
5578
5579	 Even for a mode that is no wider than a const_int,
5580	 we can't win, because we need to sign extend one of its bits through
5581	 the rest of it, and we don't know which bit.  */
5582      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5583	return x;
5584
5585      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5586	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5587	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5588	 reloaded. If not for that, MEM's would very rarely be safe.
5589
5590	 Reject MODEs bigger than a word, because we might not be able
5591	 to reference a two-register group starting with an arbitrary register
5592	 (and currently gen_lowpart might crash for a SUBREG).  */
5593
5594      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5595	return x;
5596
5597      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5598      /* If the inner object has VOIDmode (the only way this can happen
5599	 is if it is a ASM_OPERANDS), we can't do anything since we don't
5600	 know how much masking to do.  */
5601      if (len == 0)
5602	return x;
5603
5604      break;
5605
5606    case ZERO_EXTRACT:
5607      unsignedp = 1;
5608    case SIGN_EXTRACT:
5609      /* If the operand is a CLOBBER, just return it.  */
5610      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5611	return XEXP (x, 0);
5612
5613      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5614	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5615	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5616	return x;
5617
5618      len = INTVAL (XEXP (x, 1));
5619      pos = INTVAL (XEXP (x, 2));
5620
5621      /* If this goes outside the object being extracted, replace the object
5622	 with a (use (mem ...)) construct that only combine understands
5623	 and is used only for this purpose.  */
5624      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5625	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5626
5627      if (BITS_BIG_ENDIAN)
5628	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5629
5630      break;
5631
5632    default:
5633      return x;
5634    }
5635  /* Convert sign extension to zero extension, if we know that the high
5636     bit is not set, as this is easier to optimize.  It will be converted
5637     back to cheaper alternative in make_extraction.  */
5638  if (GET_CODE (x) == SIGN_EXTEND
5639      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5640	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5641		& ~(((unsigned HOST_WIDE_INT)
5642		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5643		     >> 1))
5644	       == 0)))
5645    {
5646      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5647      return expand_compound_operation (temp);
5648    }
5649
5650  /* We can optimize some special cases of ZERO_EXTEND.  */
5651  if (GET_CODE (x) == ZERO_EXTEND)
5652    {
5653      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5654         know that the last value didn't have any inappropriate bits
5655         set.  */
5656      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5657	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5658	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5659	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5660	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5661	return XEXP (XEXP (x, 0), 0);
5662
5663      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5664      if (GET_CODE (XEXP (x, 0)) == SUBREG
5665	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5666	  && subreg_lowpart_p (XEXP (x, 0))
5667	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5668	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5669	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5670	return SUBREG_REG (XEXP (x, 0));
5671
5672      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5673         is a comparison and STORE_FLAG_VALUE permits.  This is like
5674         the first case, but it works even when GET_MODE (x) is larger
5675         than HOST_WIDE_INT.  */
5676      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5677	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5678	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5679	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5680	      <= HOST_BITS_PER_WIDE_INT)
5681	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5682	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5683	return XEXP (XEXP (x, 0), 0);
5684
5685      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5686      if (GET_CODE (XEXP (x, 0)) == SUBREG
5687	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5688	  && subreg_lowpart_p (XEXP (x, 0))
5689	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5690	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5691	      <= HOST_BITS_PER_WIDE_INT)
5692	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5693	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5694	return SUBREG_REG (XEXP (x, 0));
5695
5696    }
5697
5698  /* If we reach here, we want to return a pair of shifts.  The inner
5699     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5700     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5701     logical depending on the value of UNSIGNEDP.
5702
5703     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5704     converted into an AND of a shift.
5705
5706     We must check for the case where the left shift would have a negative
5707     count.  This can happen in a case like (x >> 31) & 255 on machines
5708     that can't shift by a constant.  On those machines, we would first
5709     combine the shift with the AND to produce a variable-position
5710     extraction.  Then the constant of 31 would be substituted in to produce
5711     a such a position.  */
5712
5713  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5714  if (modewidth + len >= pos)
5715    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5716				GET_MODE (x),
5717				simplify_shift_const (NULL_RTX, ASHIFT,
5718						      GET_MODE (x),
5719						      XEXP (x, 0),
5720						      modewidth - pos - len),
5721				modewidth - len);
5722
5723  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5724    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5725				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5726							GET_MODE (x),
5727							XEXP (x, 0), pos),
5728				  ((HOST_WIDE_INT) 1 << len) - 1);
5729  else
5730    /* Any other cases we can't handle.  */
5731    return x;
5732
5733  /* If we couldn't do this for some reason, return the original
5734     expression.  */
5735  if (GET_CODE (tem) == CLOBBER)
5736    return x;
5737
5738  return tem;
5739}
5740
5741/* X is a SET which contains an assignment of one object into
5742   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5743   or certain SUBREGS). If possible, convert it into a series of
5744   logical operations.
5745
5746   We half-heartedly support variable positions, but do not at all
5747   support variable lengths.  */
5748
5749static rtx
5750expand_field_assignment (x)
5751     rtx x;
5752{
5753  rtx inner;
5754  rtx pos;			/* Always counts from low bit.  */
5755  int len;
5756  rtx mask;
5757  enum machine_mode compute_mode;
5758
5759  /* Loop until we find something we can't simplify.  */
5760  while (1)
5761    {
5762      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5763	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5764	{
5765	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5766	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5767	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5768	}
5769      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5770	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5771	{
5772	  inner = XEXP (SET_DEST (x), 0);
5773	  len = INTVAL (XEXP (SET_DEST (x), 1));
5774	  pos = XEXP (SET_DEST (x), 2);
5775
5776	  /* If the position is constant and spans the width of INNER,
5777	     surround INNER  with a USE to indicate this.  */
5778	  if (GET_CODE (pos) == CONST_INT
5779	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5780	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5781
5782	  if (BITS_BIG_ENDIAN)
5783	    {
5784	      if (GET_CODE (pos) == CONST_INT)
5785		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5786			       - INTVAL (pos));
5787	      else if (GET_CODE (pos) == MINUS
5788		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5789		       && (INTVAL (XEXP (pos, 1))
5790			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5791		/* If position is ADJUST - X, new position is X.  */
5792		pos = XEXP (pos, 0);
5793	      else
5794		pos = gen_binary (MINUS, GET_MODE (pos),
5795				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5796					   - len),
5797				  pos);
5798	    }
5799	}
5800
5801      /* A SUBREG between two modes that occupy the same numbers of words
5802	 can be done by moving the SUBREG to the source.  */
5803      else if (GET_CODE (SET_DEST (x)) == SUBREG
5804	       /* We need SUBREGs to compute nonzero_bits properly.  */
5805	       && nonzero_sign_valid
5806	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5807		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5808		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5809			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5810	{
5811	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5812			   gen_lowpart_for_combine
5813			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
5814			    SET_SRC (x)));
5815	  continue;
5816	}
5817      else
5818	break;
5819
5820      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5821	inner = SUBREG_REG (inner);
5822
5823      compute_mode = GET_MODE (inner);
5824
5825      /* Don't attempt bitwise arithmetic on non-integral modes.  */
5826      if (! INTEGRAL_MODE_P (compute_mode))
5827	{
5828	  enum machine_mode imode;
5829
5830	  /* Something is probably seriously wrong if this matches.  */
5831	  if (! FLOAT_MODE_P (compute_mode))
5832	    break;
5833
5834	  /* Try to find an integral mode to pun with.  */
5835	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
5836	  if (imode == BLKmode)
5837	    break;
5838
5839	  compute_mode = imode;
5840	  inner = gen_lowpart_for_combine (imode, inner);
5841	}
5842
5843      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5844      if (len < HOST_BITS_PER_WIDE_INT)
5845	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5846      else
5847	break;
5848
5849      /* Now compute the equivalent expression.  Make a copy of INNER
5850	 for the SET_DEST in case it is a MEM into which we will substitute;
5851	 we don't want shared RTL in that case.  */
5852      x = gen_rtx_SET
5853	(VOIDmode, copy_rtx (inner),
5854	 gen_binary (IOR, compute_mode,
5855		     gen_binary (AND, compute_mode,
5856				 simplify_gen_unary (NOT, compute_mode,
5857						     gen_binary (ASHIFT,
5858								 compute_mode,
5859								 mask, pos),
5860						     compute_mode),
5861				 inner),
5862		     gen_binary (ASHIFT, compute_mode,
5863				 gen_binary (AND, compute_mode,
5864					     gen_lowpart_for_combine
5865					     (compute_mode, SET_SRC (x)),
5866					     mask),
5867				 pos)));
5868    }
5869
5870  return x;
5871}
5872
5873/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5874   it is an RTX that represents a variable starting position; otherwise,
5875   POS is the (constant) starting bit position (counted from the LSB).
5876
5877   INNER may be a USE.  This will occur when we started with a bitfield
5878   that went outside the boundary of the object in memory, which is
5879   allowed on most machines.  To isolate this case, we produce a USE
5880   whose mode is wide enough and surround the MEM with it.  The only
5881   code that understands the USE is this routine.  If it is not removed,
5882   it will cause the resulting insn not to match.
5883
5884   UNSIGNEDP is non-zero for an unsigned reference and zero for a
5885   signed reference.
5886
5887   IN_DEST is non-zero if this is a reference in the destination of a
5888   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
5889   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5890   be used.
5891
5892   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
5893   ZERO_EXTRACT should be built even for bits starting at bit 0.
5894
5895   MODE is the desired mode of the result (if IN_DEST == 0).
5896
5897   The result is an RTX for the extraction or NULL_RTX if the target
5898   can't handle it.  */
5899
5900static rtx
5901make_extraction (mode, inner, pos, pos_rtx, len,
5902		 unsignedp, in_dest, in_compare)
5903     enum machine_mode mode;
5904     rtx inner;
5905     HOST_WIDE_INT pos;
5906     rtx pos_rtx;
5907     unsigned HOST_WIDE_INT len;
5908     int unsignedp;
5909     int in_dest, in_compare;
5910{
5911  /* This mode describes the size of the storage area
5912     to fetch the overall value from.  Within that, we
5913     ignore the POS lowest bits, etc.  */
5914  enum machine_mode is_mode = GET_MODE (inner);
5915  enum machine_mode inner_mode;
5916  enum machine_mode wanted_inner_mode = byte_mode;
5917  enum machine_mode wanted_inner_reg_mode = word_mode;
5918  enum machine_mode pos_mode = word_mode;
5919  enum machine_mode extraction_mode = word_mode;
5920  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5921  int spans_byte = 0;
5922  rtx new = 0;
5923  rtx orig_pos_rtx = pos_rtx;
5924  HOST_WIDE_INT orig_pos;
5925
5926  /* Get some information about INNER and get the innermost object.  */
5927  if (GET_CODE (inner) == USE)
5928    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5929    /* We don't need to adjust the position because we set up the USE
5930       to pretend that it was a full-word object.  */
5931    spans_byte = 1, inner = XEXP (inner, 0);
5932  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5933    {
5934      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5935	 consider just the QI as the memory to extract from.
5936	 The subreg adds or removes high bits; its mode is
5937	 irrelevant to the meaning of this extraction,
5938	 since POS and LEN count from the lsb.  */
5939      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5940	is_mode = GET_MODE (SUBREG_REG (inner));
5941      inner = SUBREG_REG (inner);
5942    }
5943
5944  inner_mode = GET_MODE (inner);
5945
5946  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5947    pos = INTVAL (pos_rtx), pos_rtx = 0;
5948
5949  /* See if this can be done without an extraction.  We never can if the
5950     width of the field is not the same as that of some integer mode. For
5951     registers, we can only avoid the extraction if the position is at the
5952     low-order bit and this is either not in the destination or we have the
5953     appropriate STRICT_LOW_PART operation available.
5954
5955     For MEM, we can avoid an extract if the field starts on an appropriate
5956     boundary and we can change the mode of the memory reference.  However,
5957     we cannot directly access the MEM if we have a USE and the underlying
5958     MEM is not TMODE.  This combination means that MEM was being used in a
5959     context where bits outside its mode were being referenced; that is only
5960     valid in bit-field insns.  */
5961
5962  if (tmode != BLKmode
5963      && ! (spans_byte && inner_mode != tmode)
5964      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5965	   && GET_CODE (inner) != MEM
5966	   && (! in_dest
5967	       || (GET_CODE (inner) == REG
5968		   && have_insn_for (STRICT_LOW_PART, tmode))))
5969	  || (GET_CODE (inner) == MEM && pos_rtx == 0
5970	      && (pos
5971		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5972		     : BITS_PER_UNIT)) == 0
5973	      /* We can't do this if we are widening INNER_MODE (it
5974		 may not be aligned, for one thing).  */
5975	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5976	      && (inner_mode == tmode
5977		  || (! mode_dependent_address_p (XEXP (inner, 0))
5978		      && ! MEM_VOLATILE_P (inner))))))
5979    {
5980      /* If INNER is a MEM, make a new MEM that encompasses just the desired
5981	 field.  If the original and current mode are the same, we need not
5982	 adjust the offset.  Otherwise, we do if bytes big endian.
5983
5984	 If INNER is not a MEM, get a piece consisting of just the field
5985	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
5986
5987      if (GET_CODE (inner) == MEM)
5988	{
5989	  HOST_WIDE_INT offset;
5990
5991	  /* POS counts from lsb, but make OFFSET count in memory order.  */
5992	  if (BYTES_BIG_ENDIAN)
5993	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5994	  else
5995	    offset = pos / BITS_PER_UNIT;
5996
5997	  new = adjust_address_nv (inner, tmode, offset);
5998	}
5999      else if (GET_CODE (inner) == REG)
6000	{
6001	  /* We can't call gen_lowpart_for_combine here since we always want
6002	     a SUBREG and it would sometimes return a new hard register.  */
6003	  if (tmode != inner_mode)
6004	    {
6005	      HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6006
6007	      if (WORDS_BIG_ENDIAN
6008		  && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6009		final_word = ((GET_MODE_SIZE (inner_mode)
6010			       - GET_MODE_SIZE (tmode))
6011			      / UNITS_PER_WORD) - final_word;
6012
6013	      final_word *= UNITS_PER_WORD;
6014	      if (BYTES_BIG_ENDIAN &&
6015		  GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6016		final_word += (GET_MODE_SIZE (inner_mode)
6017			       - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6018
6019	      new = gen_rtx_SUBREG (tmode, inner, final_word);
6020	    }
6021	  else
6022	    new = inner;
6023	}
6024      else
6025	new = force_to_mode (inner, tmode,
6026			     len >= HOST_BITS_PER_WIDE_INT
6027			     ? ~(unsigned HOST_WIDE_INT) 0
6028			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6029			     NULL_RTX, 0);
6030
6031      /* If this extraction is going into the destination of a SET,
6032	 make a STRICT_LOW_PART unless we made a MEM.  */
6033
6034      if (in_dest)
6035	return (GET_CODE (new) == MEM ? new
6036		: (GET_CODE (new) != SUBREG
6037		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6038		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6039
6040      if (mode == tmode)
6041	return new;
6042
6043      if (GET_CODE (new) == CONST_INT)
6044	return GEN_INT (trunc_int_for_mode (INTVAL (new), mode));
6045
6046      /* If we know that no extraneous bits are set, and that the high
6047	 bit is not set, convert the extraction to the cheaper of
6048	 sign and zero extension, that are equivalent in these cases.  */
6049      if (flag_expensive_optimizations
6050	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6051	      && ((nonzero_bits (new, tmode)
6052		   & ~(((unsigned HOST_WIDE_INT)
6053			GET_MODE_MASK (tmode))
6054		       >> 1))
6055		  == 0)))
6056	{
6057	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6058	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6059
6060	  /* Prefer ZERO_EXTENSION, since it gives more information to
6061	     backends.  */
6062	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6063	    return temp;
6064	  return temp1;
6065	}
6066
6067      /* Otherwise, sign- or zero-extend unless we already are in the
6068	 proper mode.  */
6069
6070      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6071			     mode, new));
6072    }
6073
6074  /* Unless this is a COMPARE or we have a funny memory reference,
6075     don't do anything with zero-extending field extracts starting at
6076     the low-order bit since they are simple AND operations.  */
6077  if (pos_rtx == 0 && pos == 0 && ! in_dest
6078      && ! in_compare && ! spans_byte && unsignedp)
6079    return 0;
6080
6081  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6082     we would be spanning bytes or if the position is not a constant and the
6083     length is not 1.  In all other cases, we would only be going outside
6084     our object in cases when an original shift would have been
6085     undefined.  */
6086  if (! spans_byte && GET_CODE (inner) == MEM
6087      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6088	  || (pos_rtx != 0 && len != 1)))
6089    return 0;
6090
6091  /* Get the mode to use should INNER not be a MEM, the mode for the position,
6092     and the mode for the result.  */
6093  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6094    {
6095      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6096      pos_mode = mode_for_extraction (EP_insv, 2);
6097      extraction_mode = mode_for_extraction (EP_insv, 3);
6098    }
6099
6100  if (! in_dest && unsignedp
6101      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6102    {
6103      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6104      pos_mode = mode_for_extraction (EP_extzv, 3);
6105      extraction_mode = mode_for_extraction (EP_extzv, 0);
6106    }
6107
6108  if (! in_dest && ! unsignedp
6109      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6110    {
6111      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6112      pos_mode = mode_for_extraction (EP_extv, 3);
6113      extraction_mode = mode_for_extraction (EP_extv, 0);
6114    }
6115
6116  /* Never narrow an object, since that might not be safe.  */
6117
6118  if (mode != VOIDmode
6119      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6120    extraction_mode = mode;
6121
6122  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6123      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6124    pos_mode = GET_MODE (pos_rtx);
6125
6126  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6127     if we have to change the mode of memory and cannot, the desired mode is
6128     EXTRACTION_MODE.  */
6129  if (GET_CODE (inner) != MEM)
6130    wanted_inner_mode = wanted_inner_reg_mode;
6131  else if (inner_mode != wanted_inner_mode
6132	   && (mode_dependent_address_p (XEXP (inner, 0))
6133	       || MEM_VOLATILE_P (inner)))
6134    wanted_inner_mode = extraction_mode;
6135
6136  orig_pos = pos;
6137
6138  if (BITS_BIG_ENDIAN)
6139    {
6140      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6141	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6142	 position.  Otherwise, build subtraction.
6143	 Note that POS is relative to the mode of the original argument.
6144	 If it's a MEM we need to recompute POS relative to that.
6145	 However, if we're extracting from (or inserting into) a register,
6146	 we want to recompute POS relative to wanted_inner_mode.  */
6147      int width = (GET_CODE (inner) == MEM
6148		   ? GET_MODE_BITSIZE (is_mode)
6149		   : GET_MODE_BITSIZE (wanted_inner_mode));
6150
6151      if (pos_rtx == 0)
6152	pos = width - len - pos;
6153      else
6154	pos_rtx
6155	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6156      /* POS may be less than 0 now, but we check for that below.
6157	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
6158    }
6159
6160  /* If INNER has a wider mode, make it smaller.  If this is a constant
6161     extract, try to adjust the byte to point to the byte containing
6162     the value.  */
6163  if (wanted_inner_mode != VOIDmode
6164      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6165      && ((GET_CODE (inner) == MEM
6166	   && (inner_mode == wanted_inner_mode
6167	       || (! mode_dependent_address_p (XEXP (inner, 0))
6168		   && ! MEM_VOLATILE_P (inner))))))
6169    {
6170      int offset = 0;
6171
6172      /* The computations below will be correct if the machine is big
6173	 endian in both bits and bytes or little endian in bits and bytes.
6174	 If it is mixed, we must adjust.  */
6175
6176      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6177	 adjust OFFSET to compensate.  */
6178      if (BYTES_BIG_ENDIAN
6179	  && ! spans_byte
6180	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6181	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6182
6183      /* If this is a constant position, we can move to the desired byte.  */
6184      if (pos_rtx == 0)
6185	{
6186	  offset += pos / BITS_PER_UNIT;
6187	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6188	}
6189
6190      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6191	  && ! spans_byte
6192	  && is_mode != wanted_inner_mode)
6193	offset = (GET_MODE_SIZE (is_mode)
6194		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6195
6196      if (offset != 0 || inner_mode != wanted_inner_mode)
6197	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6198    }
6199
6200  /* If INNER is not memory, we can always get it into the proper mode.  If we
6201     are changing its mode, POS must be a constant and smaller than the size
6202     of the new mode.  */
6203  else if (GET_CODE (inner) != MEM)
6204    {
6205      if (GET_MODE (inner) != wanted_inner_mode
6206	  && (pos_rtx != 0
6207	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6208	return 0;
6209
6210      inner = force_to_mode (inner, wanted_inner_mode,
6211			     pos_rtx
6212			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6213			     ? ~(unsigned HOST_WIDE_INT) 0
6214			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6215				<< orig_pos),
6216			     NULL_RTX, 0);
6217    }
6218
6219  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6220     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6221  if (pos_rtx != 0
6222      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6223    {
6224      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6225
6226      /* If we know that no extraneous bits are set, and that the high
6227	 bit is not set, convert extraction to cheaper one - either
6228	 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6229	 cases.  */
6230      if (flag_expensive_optimizations
6231	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6232	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6233		   & ~(((unsigned HOST_WIDE_INT)
6234			GET_MODE_MASK (GET_MODE (pos_rtx)))
6235		       >> 1))
6236		  == 0)))
6237	{
6238	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6239
6240	  /* Prefer ZERO_EXTENSION, since it gives more information to
6241	     backends.  */
6242	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6243	    temp = temp1;
6244	}
6245      pos_rtx = temp;
6246    }
6247  else if (pos_rtx != 0
6248	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6249    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6250
6251  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6252     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6253     be a CONST_INT.  */
6254  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6255    pos_rtx = orig_pos_rtx;
6256
6257  else if (pos_rtx == 0)
6258    pos_rtx = GEN_INT (pos);
6259
6260  /* Make the required operation.  See if we can use existing rtx.  */
6261  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6262			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6263  if (! in_dest)
6264    new = gen_lowpart_for_combine (mode, new);
6265
6266  return new;
6267}
6268
6269/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6270   with any other operations in X.  Return X without that shift if so.  */
6271
6272static rtx
6273extract_left_shift (x, count)
6274     rtx x;
6275     int count;
6276{
6277  enum rtx_code code = GET_CODE (x);
6278  enum machine_mode mode = GET_MODE (x);
6279  rtx tem;
6280
6281  switch (code)
6282    {
6283    case ASHIFT:
6284      /* This is the shift itself.  If it is wide enough, we will return
6285	 either the value being shifted if the shift count is equal to
6286	 COUNT or a shift for the difference.  */
6287      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6288	  && INTVAL (XEXP (x, 1)) >= count)
6289	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6290				     INTVAL (XEXP (x, 1)) - count);
6291      break;
6292
6293    case NEG:  case NOT:
6294      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6295	return simplify_gen_unary (code, mode, tem, mode);
6296
6297      break;
6298
6299    case PLUS:  case IOR:  case XOR:  case AND:
6300      /* If we can safely shift this constant and we find the inner shift,
6301	 make a new operation.  */
6302      if (GET_CODE (XEXP (x,1)) == CONST_INT
6303	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6304	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6305	return gen_binary (code, mode, tem,
6306			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6307
6308      break;
6309
6310    default:
6311      break;
6312    }
6313
6314  return 0;
6315}
6316
6317/* Look at the expression rooted at X.  Look for expressions
6318   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6319   Form these expressions.
6320
6321   Return the new rtx, usually just X.
6322
6323   Also, for machines like the VAX that don't have logical shift insns,
6324   try to convert logical to arithmetic shift operations in cases where
6325   they are equivalent.  This undoes the canonicalizations to logical
6326   shifts done elsewhere.
6327
6328   We try, as much as possible, to re-use rtl expressions to save memory.
6329
6330   IN_CODE says what kind of expression we are processing.  Normally, it is
6331   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
6332   being kludges), it is MEM.  When processing the arguments of a comparison
6333   or a COMPARE against zero, it is COMPARE.  */
6334
6335static rtx
6336make_compound_operation (x, in_code)
6337     rtx x;
6338     enum rtx_code in_code;
6339{
6340  enum rtx_code code = GET_CODE (x);
6341  enum machine_mode mode = GET_MODE (x);
6342  int mode_width = GET_MODE_BITSIZE (mode);
6343  rtx rhs, lhs;
6344  enum rtx_code next_code;
6345  int i;
6346  rtx new = 0;
6347  rtx tem;
6348  const char *fmt;
6349
6350  /* Select the code to be used in recursive calls.  Once we are inside an
6351     address, we stay there.  If we have a comparison, set to COMPARE,
6352     but once inside, go back to our default of SET.  */
6353
6354  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6355	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6356		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6357	       : in_code == COMPARE ? SET : in_code);
6358
6359  /* Process depending on the code of this operation.  If NEW is set
6360     non-zero, it will be returned.  */
6361
6362  switch (code)
6363    {
6364    case ASHIFT:
6365      /* Convert shifts by constants into multiplications if inside
6366	 an address.  */
6367      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6368	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6369	  && INTVAL (XEXP (x, 1)) >= 0)
6370	{
6371	  new = make_compound_operation (XEXP (x, 0), next_code);
6372	  new = gen_rtx_MULT (mode, new,
6373			      GEN_INT ((HOST_WIDE_INT) 1
6374				       << INTVAL (XEXP (x, 1))));
6375	}
6376      break;
6377
6378    case AND:
6379      /* If the second operand is not a constant, we can't do anything
6380	 with it.  */
6381      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6382	break;
6383
6384      /* If the constant is a power of two minus one and the first operand
6385	 is a logical right shift, make an extraction.  */
6386      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6387	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6388	{
6389	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6390	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6391				 0, in_code == COMPARE);
6392	}
6393
6394      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6395      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6396	       && subreg_lowpart_p (XEXP (x, 0))
6397	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6398	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6399	{
6400	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6401					 next_code);
6402	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6403				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6404				 0, in_code == COMPARE);
6405	}
6406      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6407      else if ((GET_CODE (XEXP (x, 0)) == XOR
6408		|| GET_CODE (XEXP (x, 0)) == IOR)
6409	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6410	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6411	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6412	{
6413	  /* Apply the distributive law, and then try to make extractions.  */
6414	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6415				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6416					     XEXP (x, 1)),
6417				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6418					     XEXP (x, 1)));
6419	  new = make_compound_operation (new, in_code);
6420	}
6421
6422      /* If we are have (and (rotate X C) M) and C is larger than the number
6423	 of bits in M, this is an extraction.  */
6424
6425      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6426	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6427	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6428	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6429	{
6430	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6431	  new = make_extraction (mode, new,
6432				 (GET_MODE_BITSIZE (mode)
6433				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6434				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6435	}
6436
6437      /* On machines without logical shifts, if the operand of the AND is
6438	 a logical shift and our mask turns off all the propagated sign
6439	 bits, we can replace the logical shift with an arithmetic shift.  */
6440      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6441	       && !have_insn_for (LSHIFTRT, mode)
6442	       && have_insn_for (ASHIFTRT, mode)
6443	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6444	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6445	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6446	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6447	{
6448	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6449
6450	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6451	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6452	    SUBST (XEXP (x, 0),
6453		   gen_rtx_ASHIFTRT (mode,
6454				     make_compound_operation
6455				     (XEXP (XEXP (x, 0), 0), next_code),
6456				     XEXP (XEXP (x, 0), 1)));
6457	}
6458
6459      /* If the constant is one less than a power of two, this might be
6460	 representable by an extraction even if no shift is present.
6461	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6462	 we are in a COMPARE.  */
6463      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6464	new = make_extraction (mode,
6465			       make_compound_operation (XEXP (x, 0),
6466							next_code),
6467			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6468
6469      /* If we are in a comparison and this is an AND with a power of two,
6470	 convert this into the appropriate bit extract.  */
6471      else if (in_code == COMPARE
6472	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6473	new = make_extraction (mode,
6474			       make_compound_operation (XEXP (x, 0),
6475							next_code),
6476			       i, NULL_RTX, 1, 1, 0, 1);
6477
6478      break;
6479
6480    case LSHIFTRT:
6481      /* If the sign bit is known to be zero, replace this with an
6482	 arithmetic shift.  */
6483      if (have_insn_for (ASHIFTRT, mode)
6484	  && ! have_insn_for (LSHIFTRT, mode)
6485	  && mode_width <= HOST_BITS_PER_WIDE_INT
6486	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6487	{
6488	  new = gen_rtx_ASHIFTRT (mode,
6489				  make_compound_operation (XEXP (x, 0),
6490							   next_code),
6491				  XEXP (x, 1));
6492	  break;
6493	}
6494
6495      /* ... fall through ...  */
6496
6497    case ASHIFTRT:
6498      lhs = XEXP (x, 0);
6499      rhs = XEXP (x, 1);
6500
6501      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6502	 this is a SIGN_EXTRACT.  */
6503      if (GET_CODE (rhs) == CONST_INT
6504	  && GET_CODE (lhs) == ASHIFT
6505	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6506	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6507	{
6508	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6509	  new = make_extraction (mode, new,
6510				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6511				 NULL_RTX, mode_width - INTVAL (rhs),
6512				 code == LSHIFTRT, 0, in_code == COMPARE);
6513	  break;
6514	}
6515
6516      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6517	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6518	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6519	 seem worth the effort; the case checked for occurs on Alpha.  */
6520
6521      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6522	  && ! (GET_CODE (lhs) == SUBREG
6523		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6524	  && GET_CODE (rhs) == CONST_INT
6525	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6526	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6527	new = make_extraction (mode, make_compound_operation (new, next_code),
6528			       0, NULL_RTX, mode_width - INTVAL (rhs),
6529			       code == LSHIFTRT, 0, in_code == COMPARE);
6530
6531      break;
6532
6533    case SUBREG:
6534      /* Call ourselves recursively on the inner expression.  If we are
6535	 narrowing the object and it has a different RTL code from
6536	 what it originally did, do this SUBREG as a force_to_mode.  */
6537
6538      tem = make_compound_operation (SUBREG_REG (x), in_code);
6539      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6540	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6541	  && subreg_lowpart_p (x))
6542	{
6543	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6544				     NULL_RTX, 0);
6545
6546	  /* If we have something other than a SUBREG, we might have
6547	     done an expansion, so rerun ourselves.  */
6548	  if (GET_CODE (newer) != SUBREG)
6549	    newer = make_compound_operation (newer, in_code);
6550
6551	  return newer;
6552	}
6553
6554      /* If this is a paradoxical subreg, and the new code is a sign or
6555	 zero extension, omit the subreg and widen the extension.  If it
6556	 is a regular subreg, we can still get rid of the subreg by not
6557	 widening so much, or in fact removing the extension entirely.  */
6558      if ((GET_CODE (tem) == SIGN_EXTEND
6559	   || GET_CODE (tem) == ZERO_EXTEND)
6560	  && subreg_lowpart_p (x))
6561	{
6562	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6563	      || (GET_MODE_SIZE (mode) >
6564		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6565	    tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6566	  else
6567	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6568	  return tem;
6569	}
6570      break;
6571
6572    default:
6573      break;
6574    }
6575
6576  if (new)
6577    {
6578      x = gen_lowpart_for_combine (mode, new);
6579      code = GET_CODE (x);
6580    }
6581
6582  /* Now recursively process each operand of this operation.  */
6583  fmt = GET_RTX_FORMAT (code);
6584  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6585    if (fmt[i] == 'e')
6586      {
6587	new = make_compound_operation (XEXP (x, i), next_code);
6588	SUBST (XEXP (x, i), new);
6589      }
6590
6591  return x;
6592}
6593
6594/* Given M see if it is a value that would select a field of bits
6595   within an item, but not the entire word.  Return -1 if not.
6596   Otherwise, return the starting position of the field, where 0 is the
6597   low-order bit.
6598
6599   *PLEN is set to the length of the field.  */
6600
6601static int
6602get_pos_from_mask (m, plen)
6603     unsigned HOST_WIDE_INT m;
6604     unsigned HOST_WIDE_INT *plen;
6605{
6606  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6607  int pos = exact_log2 (m & -m);
6608  int len;
6609
6610  if (pos < 0)
6611    return -1;
6612
6613  /* Now shift off the low-order zero bits and see if we have a power of
6614     two minus 1.  */
6615  len = exact_log2 ((m >> pos) + 1);
6616
6617  if (len <= 0)
6618    return -1;
6619
6620  *plen = len;
6621  return pos;
6622}
6623
6624/* See if X can be simplified knowing that we will only refer to it in
6625   MODE and will only refer to those bits that are nonzero in MASK.
6626   If other bits are being computed or if masking operations are done
6627   that select a superset of the bits in MASK, they can sometimes be
6628   ignored.
6629
6630   Return a possibly simplified expression, but always convert X to
6631   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6632
6633   Also, if REG is non-zero and X is a register equal in value to REG,
6634   replace X with REG.
6635
6636   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6637   are all off in X.  This is used when X will be complemented, by either
6638   NOT, NEG, or XOR.  */
6639
6640static rtx
6641force_to_mode (x, mode, mask, reg, just_select)
6642     rtx x;
6643     enum machine_mode mode;
6644     unsigned HOST_WIDE_INT mask;
6645     rtx reg;
6646     int just_select;
6647{
6648  enum rtx_code code = GET_CODE (x);
6649  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6650  enum machine_mode op_mode;
6651  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6652  rtx op0, op1, temp;
6653
6654  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6655     code below will do the wrong thing since the mode of such an
6656     expression is VOIDmode.
6657
6658     Also do nothing if X is a CLOBBER; this can happen if X was
6659     the return value from a call to gen_lowpart_for_combine.  */
6660  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6661    return x;
6662
6663  /* We want to perform the operation is its present mode unless we know
6664     that the operation is valid in MODE, in which case we do the operation
6665     in MODE.  */
6666  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6667	      && have_insn_for (code, mode))
6668	     ? mode : GET_MODE (x));
6669
6670  /* It is not valid to do a right-shift in a narrower mode
6671     than the one it came in with.  */
6672  if ((code == LSHIFTRT || code == ASHIFTRT)
6673      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6674    op_mode = GET_MODE (x);
6675
6676  /* Truncate MASK to fit OP_MODE.  */
6677  if (op_mode)
6678    mask &= GET_MODE_MASK (op_mode);
6679
6680  /* When we have an arithmetic operation, or a shift whose count we
6681     do not know, we need to assume that all bit the up to the highest-order
6682     bit in MASK will be needed.  This is how we form such a mask.  */
6683  if (op_mode)
6684    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6685		   ? GET_MODE_MASK (op_mode)
6686		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6687		      - 1));
6688  else
6689    fuller_mask = ~(HOST_WIDE_INT) 0;
6690
6691  /* Determine what bits of X are guaranteed to be (non)zero.  */
6692  nonzero = nonzero_bits (x, mode);
6693
6694  /* If none of the bits in X are needed, return a zero.  */
6695  if (! just_select && (nonzero & mask) == 0)
6696    return const0_rtx;
6697
6698  /* If X is a CONST_INT, return a new one.  Do this here since the
6699     test below will fail.  */
6700  if (GET_CODE (x) == CONST_INT)
6701    return gen_int_mode (INTVAL (x) & mask, mode);
6702
6703  /* If X is narrower than MODE and we want all the bits in X's mode, just
6704     get X in the proper mode.  */
6705  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6706      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6707    return gen_lowpart_for_combine (mode, x);
6708
6709  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6710     MASK are already known to be zero in X, we need not do anything.  */
6711  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6712    return x;
6713
6714  switch (code)
6715    {
6716    case CLOBBER:
6717      /* If X is a (clobber (const_int)), return it since we know we are
6718	 generating something that won't match.  */
6719      return x;
6720
6721    case USE:
6722      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6723	 spanned the boundary of the MEM.  If we are now masking so it is
6724	 within that boundary, we don't need the USE any more.  */
6725      if (! BITS_BIG_ENDIAN
6726	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6727	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6728      break;
6729
6730    case SIGN_EXTEND:
6731    case ZERO_EXTEND:
6732    case ZERO_EXTRACT:
6733    case SIGN_EXTRACT:
6734      x = expand_compound_operation (x);
6735      if (GET_CODE (x) != code)
6736	return force_to_mode (x, mode, mask, reg, next_select);
6737      break;
6738
6739    case REG:
6740      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6741		       || rtx_equal_p (reg, get_last_value (x))))
6742	x = reg;
6743      break;
6744
6745    case SUBREG:
6746      if (subreg_lowpart_p (x)
6747	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6748	     if the constant masks to zero all the bits the mode doesn't
6749	     have.  */
6750	  && ((GET_MODE_SIZE (GET_MODE (x))
6751	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6752	      || (0 == (mask
6753			& GET_MODE_MASK (GET_MODE (x))
6754			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6755	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6756      break;
6757
6758    case AND:
6759      /* If this is an AND with a constant, convert it into an AND
6760	 whose constant is the AND of that constant with MASK.  If it
6761	 remains an AND of MASK, delete it since it is redundant.  */
6762
6763      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6764	{
6765	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6766				      mask & INTVAL (XEXP (x, 1)));
6767
6768	  /* If X is still an AND, see if it is an AND with a mask that
6769	     is just some low-order bits.  If so, and it is MASK, we don't
6770	     need it.  */
6771
6772	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6773	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
6774		  == (HOST_WIDE_INT) mask))
6775	    x = XEXP (x, 0);
6776
6777	  /* If it remains an AND, try making another AND with the bits
6778	     in the mode mask that aren't in MASK turned on.  If the
6779	     constant in the AND is wide enough, this might make a
6780	     cheaper constant.  */
6781
6782	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6783	      && GET_MODE_MASK (GET_MODE (x)) != mask
6784	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6785	    {
6786	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6787				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
6788	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6789	      rtx y;
6790
6791	      /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6792		 number, sign extend it.  */
6793	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6794		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6795		cval |= (HOST_WIDE_INT) -1 << width;
6796
6797	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6798	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6799		x = y;
6800	    }
6801
6802	  break;
6803	}
6804
6805      goto binop;
6806
6807    case PLUS:
6808      /* In (and (plus FOO C1) M), if M is a mask that just turns off
6809	 low-order bits (as in an alignment operation) and FOO is already
6810	 aligned to that boundary, mask C1 to that boundary as well.
6811	 This may eliminate that PLUS and, later, the AND.  */
6812
6813      {
6814	unsigned int width = GET_MODE_BITSIZE (mode);
6815	unsigned HOST_WIDE_INT smask = mask;
6816
6817	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6818	   number, sign extend it.  */
6819
6820	if (width < HOST_BITS_PER_WIDE_INT
6821	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6822	  smask |= (HOST_WIDE_INT) -1 << width;
6823
6824	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6825	    && exact_log2 (- smask) >= 0
6826	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
6827	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
6828	  return force_to_mode (plus_constant (XEXP (x, 0),
6829					       (INTVAL (XEXP (x, 1)) & smask)),
6830				mode, smask, reg, next_select);
6831      }
6832
6833      /* ... fall through ...  */
6834
6835    case MULT:
6836      /* For PLUS, MINUS and MULT, we need any bits less significant than the
6837	 most significant bit in MASK since carries from those bits will
6838	 affect the bits we are interested in.  */
6839      mask = fuller_mask;
6840      goto binop;
6841
6842    case MINUS:
6843      /* If X is (minus C Y) where C's least set bit is larger than any bit
6844	 in the mask, then we may replace with (neg Y).  */
6845      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6846	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
6847					& -INTVAL (XEXP (x, 0))))
6848	      > mask))
6849	{
6850	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
6851				  GET_MODE (x));
6852	  return force_to_mode (x, mode, mask, reg, next_select);
6853	}
6854
6855      /* Similarly, if C contains every bit in the mask, then we may
6856	 replace with (not Y).  */
6857      if (GET_CODE (XEXP (x, 0)) == CONST_INT
6858	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) mask)
6859	      == INTVAL (XEXP (x, 0))))
6860	{
6861	  x = simplify_gen_unary (NOT, GET_MODE (x),
6862				  XEXP (x, 1), GET_MODE (x));
6863	  return force_to_mode (x, mode, mask, reg, next_select);
6864	}
6865
6866      mask = fuller_mask;
6867      goto binop;
6868
6869    case IOR:
6870    case XOR:
6871      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6872	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6873	 operation which may be a bitfield extraction.  Ensure that the
6874	 constant we form is not wider than the mode of X.  */
6875
6876      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6877	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6878	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6879	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6880	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6881	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
6882	       + floor_log2 (INTVAL (XEXP (x, 1))))
6883	      < GET_MODE_BITSIZE (GET_MODE (x)))
6884	  && (INTVAL (XEXP (x, 1))
6885	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6886	{
6887	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6888			  << INTVAL (XEXP (XEXP (x, 0), 1)));
6889	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
6890			     XEXP (XEXP (x, 0), 0), temp);
6891	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6892			  XEXP (XEXP (x, 0), 1));
6893	  return force_to_mode (x, mode, mask, reg, next_select);
6894	}
6895
6896    binop:
6897      /* For most binary operations, just propagate into the operation and
6898	 change the mode if we have an operation of that mode.  */
6899
6900      op0 = gen_lowpart_for_combine (op_mode,
6901				     force_to_mode (XEXP (x, 0), mode, mask,
6902						    reg, next_select));
6903      op1 = gen_lowpart_for_combine (op_mode,
6904				     force_to_mode (XEXP (x, 1), mode, mask,
6905						    reg, next_select));
6906
6907      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6908	x = gen_binary (code, op_mode, op0, op1);
6909      break;
6910
6911    case ASHIFT:
6912      /* For left shifts, do the same, but just for the first operand.
6913	 However, we cannot do anything with shifts where we cannot
6914	 guarantee that the counts are smaller than the size of the mode
6915	 because such a count will have a different meaning in a
6916	 wider mode.  */
6917
6918      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6919	     && INTVAL (XEXP (x, 1)) >= 0
6920	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6921	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6922		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6923		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6924	break;
6925
6926      /* If the shift count is a constant and we can do arithmetic in
6927	 the mode of the shift, refine which bits we need.  Otherwise, use the
6928	 conservative form of the mask.  */
6929      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6930	  && INTVAL (XEXP (x, 1)) >= 0
6931	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6932	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6933	mask >>= INTVAL (XEXP (x, 1));
6934      else
6935	mask = fuller_mask;
6936
6937      op0 = gen_lowpart_for_combine (op_mode,
6938				     force_to_mode (XEXP (x, 0), op_mode,
6939						    mask, reg, next_select));
6940
6941      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6942	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6943      break;
6944
6945    case LSHIFTRT:
6946      /* Here we can only do something if the shift count is a constant,
6947	 this shift constant is valid for the host, and we can do arithmetic
6948	 in OP_MODE.  */
6949
6950      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6951	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6952	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6953	{
6954	  rtx inner = XEXP (x, 0);
6955	  unsigned HOST_WIDE_INT inner_mask;
6956
6957	  /* Select the mask of the bits we need for the shift operand.  */
6958	  inner_mask = mask << INTVAL (XEXP (x, 1));
6959
6960	  /* We can only change the mode of the shift if we can do arithmetic
6961	     in the mode of the shift and INNER_MASK is no wider than the
6962	     width of OP_MODE.  */
6963	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6964	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
6965	    op_mode = GET_MODE (x);
6966
6967	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
6968
6969	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6970	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
6971	}
6972
6973      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6974	 shift and AND produces only copies of the sign bit (C2 is one less
6975	 than a power of two), we can do this with just a shift.  */
6976
6977      if (GET_CODE (x) == LSHIFTRT
6978	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6979	  /* The shift puts one of the sign bit copies in the least significant
6980	     bit.  */
6981	  && ((INTVAL (XEXP (x, 1))
6982	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6983	      >= GET_MODE_BITSIZE (GET_MODE (x)))
6984	  && exact_log2 (mask + 1) >= 0
6985	  /* Number of bits left after the shift must be more than the mask
6986	     needs.  */
6987	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
6988	      <= GET_MODE_BITSIZE (GET_MODE (x)))
6989	  /* Must be more sign bit copies than the mask needs.  */
6990	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6991	      >= exact_log2 (mask + 1)))
6992	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6993			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6994				 - exact_log2 (mask + 1)));
6995
6996      goto shiftrt;
6997
6998    case ASHIFTRT:
6999      /* If we are just looking for the sign bit, we don't need this shift at
7000	 all, even if it has a variable count.  */
7001      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7002	  && (mask == ((unsigned HOST_WIDE_INT) 1
7003		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7004	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7005
7006      /* If this is a shift by a constant, get a mask that contains those bits
7007	 that are not copies of the sign bit.  We then have two cases:  If
7008	 MASK only includes those bits, this can be a logical shift, which may
7009	 allow simplifications.  If MASK is a single-bit field not within
7010	 those bits, we are requesting a copy of the sign bit and hence can
7011	 shift the sign bit to the appropriate location.  */
7012
7013      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7014	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7015	{
7016	  int i = -1;
7017
7018	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7019	     represent a mask for all its bits in a single scalar.
7020	     But we only care about the lower bits, so calculate these.  */
7021
7022	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7023	    {
7024	      nonzero = ~(HOST_WIDE_INT) 0;
7025
7026	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7027		 is the number of bits a full-width mask would have set.
7028		 We need only shift if these are fewer than nonzero can
7029		 hold.  If not, we must keep all bits set in nonzero.  */
7030
7031	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7032		  < HOST_BITS_PER_WIDE_INT)
7033		nonzero >>= INTVAL (XEXP (x, 1))
7034			    + HOST_BITS_PER_WIDE_INT
7035			    - GET_MODE_BITSIZE (GET_MODE (x)) ;
7036	    }
7037	  else
7038	    {
7039	      nonzero = GET_MODE_MASK (GET_MODE (x));
7040	      nonzero >>= INTVAL (XEXP (x, 1));
7041	    }
7042
7043	  if ((mask & ~nonzero) == 0
7044	      || (i = exact_log2 (mask)) >= 0)
7045	    {
7046	      x = simplify_shift_const
7047		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7048		 i < 0 ? INTVAL (XEXP (x, 1))
7049		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7050
7051	      if (GET_CODE (x) != ASHIFTRT)
7052		return force_to_mode (x, mode, mask, reg, next_select);
7053	    }
7054	}
7055
7056      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
7057	 even if the shift count isn't a constant.  */
7058      if (mask == 1)
7059	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7060
7061    shiftrt:
7062
7063      /* If this is a zero- or sign-extension operation that just affects bits
7064	 we don't care about, remove it.  Be sure the call above returned
7065	 something that is still a shift.  */
7066
7067      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7068	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7069	  && INTVAL (XEXP (x, 1)) >= 0
7070	  && (INTVAL (XEXP (x, 1))
7071	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7072	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7073	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7074	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7075	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7076			      reg, next_select);
7077
7078      break;
7079
7080    case ROTATE:
7081    case ROTATERT:
7082      /* If the shift count is constant and we can do computations
7083	 in the mode of X, compute where the bits we care about are.
7084	 Otherwise, we can't do anything.  Don't change the mode of
7085	 the shift or propagate MODE into the shift, though.  */
7086      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7087	  && INTVAL (XEXP (x, 1)) >= 0)
7088	{
7089	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7090					    GET_MODE (x), GEN_INT (mask),
7091					    XEXP (x, 1));
7092	  if (temp && GET_CODE(temp) == CONST_INT)
7093	    SUBST (XEXP (x, 0),
7094		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7095				  INTVAL (temp), reg, next_select));
7096	}
7097      break;
7098
7099    case NEG:
7100      /* If we just want the low-order bit, the NEG isn't needed since it
7101	 won't change the low-order bit.  */
7102      if (mask == 1)
7103	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7104
7105      /* We need any bits less significant than the most significant bit in
7106	 MASK since carries from those bits will affect the bits we are
7107	 interested in.  */
7108      mask = fuller_mask;
7109      goto unop;
7110
7111    case NOT:
7112      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7113	 same as the XOR case above.  Ensure that the constant we form is not
7114	 wider than the mode of X.  */
7115
7116      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7117	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7118	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7119	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7120	      < GET_MODE_BITSIZE (GET_MODE (x)))
7121	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7122	{
7123	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
7124	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7125	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7126
7127	  return force_to_mode (x, mode, mask, reg, next_select);
7128	}
7129
7130      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7131	 use the full mask inside the NOT.  */
7132      mask = fuller_mask;
7133
7134    unop:
7135      op0 = gen_lowpart_for_combine (op_mode,
7136				     force_to_mode (XEXP (x, 0), mode, mask,
7137						    reg, next_select));
7138      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7139	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7140      break;
7141
7142    case NE:
7143      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7144	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7145	 which is equal to STORE_FLAG_VALUE.  */
7146      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7147	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7148	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7149	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7150
7151      break;
7152
7153    case IF_THEN_ELSE:
7154      /* We have no way of knowing if the IF_THEN_ELSE can itself be
7155	 written in a narrower mode.  We play it safe and do not do so.  */
7156
7157      SUBST (XEXP (x, 1),
7158	     gen_lowpart_for_combine (GET_MODE (x),
7159				      force_to_mode (XEXP (x, 1), mode,
7160						     mask, reg, next_select)));
7161      SUBST (XEXP (x, 2),
7162	     gen_lowpart_for_combine (GET_MODE (x),
7163				      force_to_mode (XEXP (x, 2), mode,
7164						     mask, reg,next_select)));
7165      break;
7166
7167    default:
7168      break;
7169    }
7170
7171  /* Ensure we return a value of the proper mode.  */
7172  return gen_lowpart_for_combine (mode, x);
7173}
7174
7175/* Return nonzero if X is an expression that has one of two values depending on
7176   whether some other value is zero or nonzero.  In that case, we return the
7177   value that is being tested, *PTRUE is set to the value if the rtx being
7178   returned has a nonzero value, and *PFALSE is set to the other alternative.
7179
7180   If we return zero, we set *PTRUE and *PFALSE to X.  */
7181
7182static rtx
7183if_then_else_cond (x, ptrue, pfalse)
7184     rtx x;
7185     rtx *ptrue, *pfalse;
7186{
7187  enum machine_mode mode = GET_MODE (x);
7188  enum rtx_code code = GET_CODE (x);
7189  rtx cond0, cond1, true0, true1, false0, false1;
7190  unsigned HOST_WIDE_INT nz;
7191
7192  /* If we are comparing a value against zero, we are done.  */
7193  if ((code == NE || code == EQ)
7194      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7195    {
7196      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7197      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7198      return XEXP (x, 0);
7199    }
7200
7201  /* If this is a unary operation whose operand has one of two values, apply
7202     our opcode to compute those values.  */
7203  else if (GET_RTX_CLASS (code) == '1'
7204	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7205    {
7206      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7207      *pfalse = simplify_gen_unary (code, mode, false0,
7208				    GET_MODE (XEXP (x, 0)));
7209      return cond0;
7210    }
7211
7212  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7213     make can't possibly match and would suppress other optimizations.  */
7214  else if (code == COMPARE)
7215    ;
7216
7217  /* If this is a binary operation, see if either side has only one of two
7218     values.  If either one does or if both do and they are conditional on
7219     the same value, compute the new true and false values.  */
7220  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7221	   || GET_RTX_CLASS (code) == '<')
7222    {
7223      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7224      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7225
7226      if ((cond0 != 0 || cond1 != 0)
7227	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7228	{
7229	  /* If if_then_else_cond returned zero, then true/false are the
7230	     same rtl.  We must copy one of them to prevent invalid rtl
7231	     sharing.  */
7232	  if (cond0 == 0)
7233	    true0 = copy_rtx (true0);
7234	  else if (cond1 == 0)
7235	    true1 = copy_rtx (true1);
7236
7237	  *ptrue = gen_binary (code, mode, true0, true1);
7238	  *pfalse = gen_binary (code, mode, false0, false1);
7239	  return cond0 ? cond0 : cond1;
7240	}
7241
7242      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7243	 operands is zero when the other is non-zero, and vice-versa,
7244	 and STORE_FLAG_VALUE is 1 or -1.  */
7245
7246      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7247	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7248	      || code == UMAX)
7249	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7250	{
7251	  rtx op0 = XEXP (XEXP (x, 0), 1);
7252	  rtx op1 = XEXP (XEXP (x, 1), 1);
7253
7254	  cond0 = XEXP (XEXP (x, 0), 0);
7255	  cond1 = XEXP (XEXP (x, 1), 0);
7256
7257	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7258	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7259	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7260		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7261		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7262		  || ((swap_condition (GET_CODE (cond0))
7263		       == combine_reversed_comparison_code (cond1))
7264		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7265		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7266	      && ! side_effects_p (x))
7267	    {
7268	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7269	      *pfalse = gen_binary (MULT, mode,
7270				    (code == MINUS
7271				     ? simplify_gen_unary (NEG, mode, op1,
7272							   mode)
7273				     : op1),
7274				    const_true_rtx);
7275	      return cond0;
7276	    }
7277	}
7278
7279      /* Similarly for MULT, AND and UMIN, except that for these the result
7280	 is always zero.  */
7281      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7282	  && (code == MULT || code == AND || code == UMIN)
7283	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7284	{
7285	  cond0 = XEXP (XEXP (x, 0), 0);
7286	  cond1 = XEXP (XEXP (x, 1), 0);
7287
7288	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7289	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7290	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7291		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7292		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7293		  || ((swap_condition (GET_CODE (cond0))
7294		       == combine_reversed_comparison_code (cond1))
7295		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7296		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7297	      && ! side_effects_p (x))
7298	    {
7299	      *ptrue = *pfalse = const0_rtx;
7300	      return cond0;
7301	    }
7302	}
7303    }
7304
7305  else if (code == IF_THEN_ELSE)
7306    {
7307      /* If we have IF_THEN_ELSE already, extract the condition and
7308	 canonicalize it if it is NE or EQ.  */
7309      cond0 = XEXP (x, 0);
7310      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7311      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7312	return XEXP (cond0, 0);
7313      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7314	{
7315	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7316	  return XEXP (cond0, 0);
7317	}
7318      else
7319	return cond0;
7320    }
7321
7322  /* If X is a SUBREG, we can narrow both the true and false values
7323     if the inner expression, if there is a condition.  */
7324  else if (code == SUBREG
7325	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7326					       &true0, &false0)))
7327    {
7328      *ptrue = simplify_gen_subreg (mode, true0,
7329				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7330      *pfalse = simplify_gen_subreg (mode, false0,
7331				     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7332
7333      return cond0;
7334    }
7335
7336  /* If X is a constant, this isn't special and will cause confusions
7337     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7338  else if (CONSTANT_P (x)
7339	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7340    ;
7341
7342  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7343     will be least confusing to the rest of the compiler.  */
7344  else if (mode == BImode)
7345    {
7346      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7347      return x;
7348    }
7349
7350  /* If X is known to be either 0 or -1, those are the true and
7351     false values when testing X.  */
7352  else if (x == constm1_rtx || x == const0_rtx
7353	   || (mode != VOIDmode
7354	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7355    {
7356      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7357      return x;
7358    }
7359
7360  /* Likewise for 0 or a single bit.  */
7361  else if (mode != VOIDmode
7362	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7363	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7364    {
7365      *ptrue = GEN_INT (trunc_int_for_mode (nz, mode)), *pfalse = const0_rtx;
7366      return x;
7367    }
7368
7369  /* Otherwise fail; show no condition with true and false values the same.  */
7370  *ptrue = *pfalse = x;
7371  return 0;
7372}
7373
7374/* Return the value of expression X given the fact that condition COND
7375   is known to be true when applied to REG as its first operand and VAL
7376   as its second.  X is known to not be shared and so can be modified in
7377   place.
7378
7379   We only handle the simplest cases, and specifically those cases that
7380   arise with IF_THEN_ELSE expressions.  */
7381
7382static rtx
7383known_cond (x, cond, reg, val)
7384     rtx x;
7385     enum rtx_code cond;
7386     rtx reg, val;
7387{
7388  enum rtx_code code = GET_CODE (x);
7389  rtx temp;
7390  const char *fmt;
7391  int i, j;
7392
7393  if (side_effects_p (x))
7394    return x;
7395
7396  /* If either operand of the condition is a floating point value,
7397     then we have to avoid collapsing an EQ comparison.  */
7398  if (cond == EQ
7399      && rtx_equal_p (x, reg)
7400      && ! FLOAT_MODE_P (GET_MODE (x))
7401      && ! FLOAT_MODE_P (GET_MODE (val)))
7402    return val;
7403
7404  if (cond == UNEQ && rtx_equal_p (x, reg))
7405    return val;
7406
7407  /* If X is (abs REG) and we know something about REG's relationship
7408     with zero, we may be able to simplify this.  */
7409
7410  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7411    switch (cond)
7412      {
7413      case GE:  case GT:  case EQ:
7414	return XEXP (x, 0);
7415      case LT:  case LE:
7416	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7417				   XEXP (x, 0),
7418				   GET_MODE (XEXP (x, 0)));
7419      default:
7420	break;
7421      }
7422
7423  /* The only other cases we handle are MIN, MAX, and comparisons if the
7424     operands are the same as REG and VAL.  */
7425
7426  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7427    {
7428      if (rtx_equal_p (XEXP (x, 0), val))
7429	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7430
7431      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7432	{
7433	  if (GET_RTX_CLASS (code) == '<')
7434	    {
7435	      if (comparison_dominates_p (cond, code))
7436		return const_true_rtx;
7437
7438	      code = combine_reversed_comparison_code (x);
7439	      if (code != UNKNOWN
7440		  && comparison_dominates_p (cond, code))
7441		return const0_rtx;
7442	      else
7443		return x;
7444	    }
7445	  else if (code == SMAX || code == SMIN
7446		   || code == UMIN || code == UMAX)
7447	    {
7448	      int unsignedp = (code == UMIN || code == UMAX);
7449
7450	      /* Do not reverse the condition when it is NE or EQ.
7451		 This is because we cannot conclude anything about
7452		 the value of 'SMAX (x, y)' when x is not equal to y,
7453		 but we can when x equals y.  */
7454	      if ((code == SMAX || code == UMAX)
7455		  && ! (cond == EQ || cond == NE))
7456		cond = reverse_condition (cond);
7457
7458	      switch (cond)
7459		{
7460		case GE:   case GT:
7461		  return unsignedp ? x : XEXP (x, 1);
7462		case LE:   case LT:
7463		  return unsignedp ? x : XEXP (x, 0);
7464		case GEU:  case GTU:
7465		  return unsignedp ? XEXP (x, 1) : x;
7466		case LEU:  case LTU:
7467		  return unsignedp ? XEXP (x, 0) : x;
7468		default:
7469		  break;
7470		}
7471	    }
7472	}
7473    }
7474  else if (code == SUBREG)
7475    {
7476      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7477      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7478
7479      if (SUBREG_REG (x) != r)
7480	{
7481	  /* We must simplify subreg here, before we lose track of the
7482	     original inner_mode.  */
7483	  new = simplify_subreg (GET_MODE (x), r,
7484				 inner_mode, SUBREG_BYTE (x));
7485	  if (new)
7486	    return new;
7487	  else
7488	    SUBST (SUBREG_REG (x), r);
7489	}
7490
7491      return x;
7492    }
7493  /* We don't have to handle SIGN_EXTEND here, because even in the
7494     case of replacing something with a modeless CONST_INT, a
7495     CONST_INT is already (supposed to be) a valid sign extension for
7496     its narrower mode, which implies it's already properly
7497     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7498     story is different.  */
7499  else if (code == ZERO_EXTEND)
7500    {
7501      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7502      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7503
7504      if (XEXP (x, 0) != r)
7505	{
7506	  /* We must simplify the zero_extend here, before we lose
7507             track of the original inner_mode.  */
7508	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7509					  r, inner_mode);
7510	  if (new)
7511	    return new;
7512	  else
7513	    SUBST (XEXP (x, 0), r);
7514	}
7515
7516      return x;
7517    }
7518
7519  fmt = GET_RTX_FORMAT (code);
7520  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7521    {
7522      if (fmt[i] == 'e')
7523	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7524      else if (fmt[i] == 'E')
7525	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7526	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7527						cond, reg, val));
7528    }
7529
7530  return x;
7531}
7532
7533/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7534   assignment as a field assignment.  */
7535
7536static int
7537rtx_equal_for_field_assignment_p (x, y)
7538     rtx x;
7539     rtx y;
7540{
7541  if (x == y || rtx_equal_p (x, y))
7542    return 1;
7543
7544  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7545    return 0;
7546
7547  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7548     Note that all SUBREGs of MEM are paradoxical; otherwise they
7549     would have been rewritten.  */
7550  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7551      && GET_CODE (SUBREG_REG (y)) == MEM
7552      && rtx_equal_p (SUBREG_REG (y),
7553		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7554    return 1;
7555
7556  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7557      && GET_CODE (SUBREG_REG (x)) == MEM
7558      && rtx_equal_p (SUBREG_REG (x),
7559		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7560    return 1;
7561
7562  /* We used to see if get_last_value of X and Y were the same but that's
7563     not correct.  In one direction, we'll cause the assignment to have
7564     the wrong destination and in the case, we'll import a register into this
7565     insn that might have already have been dead.   So fail if none of the
7566     above cases are true.  */
7567  return 0;
7568}
7569
7570/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7571   Return that assignment if so.
7572
7573   We only handle the most common cases.  */
7574
7575static rtx
7576make_field_assignment (x)
7577     rtx x;
7578{
7579  rtx dest = SET_DEST (x);
7580  rtx src = SET_SRC (x);
7581  rtx assign;
7582  rtx rhs, lhs;
7583  HOST_WIDE_INT c1;
7584  HOST_WIDE_INT pos;
7585  unsigned HOST_WIDE_INT len;
7586  rtx other;
7587  enum machine_mode mode;
7588
7589  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7590     a clear of a one-bit field.  We will have changed it to
7591     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7592     for a SUBREG.  */
7593
7594  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7595      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7596      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7597      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7598    {
7599      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7600				1, 1, 1, 0);
7601      if (assign != 0)
7602	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7603      return x;
7604    }
7605
7606  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7607	   && subreg_lowpart_p (XEXP (src, 0))
7608	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7609	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7610	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7611	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7612	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7613    {
7614      assign = make_extraction (VOIDmode, dest, 0,
7615				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7616				1, 1, 1, 0);
7617      if (assign != 0)
7618	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7619      return x;
7620    }
7621
7622  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7623     one-bit field.  */
7624  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7625	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7626	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7627    {
7628      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7629				1, 1, 1, 0);
7630      if (assign != 0)
7631	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7632      return x;
7633    }
7634
7635  /* The other case we handle is assignments into a constant-position
7636     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7637     a mask that has all one bits except for a group of zero bits and
7638     OTHER is known to have zeros where C1 has ones, this is such an
7639     assignment.  Compute the position and length from C1.  Shift OTHER
7640     to the appropriate position, force it to the required mode, and
7641     make the extraction.  Check for the AND in both operands.  */
7642
7643  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7644    return x;
7645
7646  rhs = expand_compound_operation (XEXP (src, 0));
7647  lhs = expand_compound_operation (XEXP (src, 1));
7648
7649  if (GET_CODE (rhs) == AND
7650      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7651      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7652    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7653  else if (GET_CODE (lhs) == AND
7654	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7655	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7656    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7657  else
7658    return x;
7659
7660  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7661  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7662      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7663      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7664    return x;
7665
7666  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7667  if (assign == 0)
7668    return x;
7669
7670  /* The mode to use for the source is the mode of the assignment, or of
7671     what is inside a possible STRICT_LOW_PART.  */
7672  mode = (GET_CODE (assign) == STRICT_LOW_PART
7673	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7674
7675  /* Shift OTHER right POS places and make it the source, restricting it
7676     to the proper length and mode.  */
7677
7678  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7679					     GET_MODE (src), other, pos),
7680		       mode,
7681		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7682		       ? ~(unsigned HOST_WIDE_INT) 0
7683		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7684		       dest, 0);
7685
7686  return gen_rtx_SET (VOIDmode, assign, src);
7687}
7688
7689/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7690   if so.  */
7691
7692static rtx
7693apply_distributive_law (x)
7694     rtx x;
7695{
7696  enum rtx_code code = GET_CODE (x);
7697  rtx lhs, rhs, other;
7698  rtx tem;
7699  enum rtx_code inner_code;
7700
7701  /* Distributivity is not true for floating point.
7702     It can change the value.  So don't do it.
7703     -- rms and moshier@world.std.com.  */
7704  if (FLOAT_MODE_P (GET_MODE (x)))
7705    return x;
7706
7707  /* The outer operation can only be one of the following:  */
7708  if (code != IOR && code != AND && code != XOR
7709      && code != PLUS && code != MINUS)
7710    return x;
7711
7712  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7713
7714  /* If either operand is a primitive we can't do anything, so get out
7715     fast.  */
7716  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7717      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7718    return x;
7719
7720  lhs = expand_compound_operation (lhs);
7721  rhs = expand_compound_operation (rhs);
7722  inner_code = GET_CODE (lhs);
7723  if (inner_code != GET_CODE (rhs))
7724    return x;
7725
7726  /* See if the inner and outer operations distribute.  */
7727  switch (inner_code)
7728    {
7729    case LSHIFTRT:
7730    case ASHIFTRT:
7731    case AND:
7732    case IOR:
7733      /* These all distribute except over PLUS.  */
7734      if (code == PLUS || code == MINUS)
7735	return x;
7736      break;
7737
7738    case MULT:
7739      if (code != PLUS && code != MINUS)
7740	return x;
7741      break;
7742
7743    case ASHIFT:
7744      /* This is also a multiply, so it distributes over everything.  */
7745      break;
7746
7747    case SUBREG:
7748      /* Non-paradoxical SUBREGs distributes over all operations, provided
7749	 the inner modes and byte offsets are the same, this is an extraction
7750	 of a low-order part, we don't convert an fp operation to int or
7751	 vice versa, and we would not be converting a single-word
7752	 operation into a multi-word operation.  The latter test is not
7753	 required, but it prevents generating unneeded multi-word operations.
7754	 Some of the previous tests are redundant given the latter test, but
7755	 are retained because they are required for correctness.
7756
7757	 We produce the result slightly differently in this case.  */
7758
7759      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7760	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7761	  || ! subreg_lowpart_p (lhs)
7762	  || (GET_MODE_CLASS (GET_MODE (lhs))
7763	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7764	  || (GET_MODE_SIZE (GET_MODE (lhs))
7765	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7766	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7767	return x;
7768
7769      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7770			SUBREG_REG (lhs), SUBREG_REG (rhs));
7771      return gen_lowpart_for_combine (GET_MODE (x), tem);
7772
7773    default:
7774      return x;
7775    }
7776
7777  /* Set LHS and RHS to the inner operands (A and B in the example
7778     above) and set OTHER to the common operand (C in the example).
7779     These is only one way to do this unless the inner operation is
7780     commutative.  */
7781  if (GET_RTX_CLASS (inner_code) == 'c'
7782      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7783    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7784  else if (GET_RTX_CLASS (inner_code) == 'c'
7785	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7786    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7787  else if (GET_RTX_CLASS (inner_code) == 'c'
7788	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7789    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7790  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7791    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7792  else
7793    return x;
7794
7795  /* Form the new inner operation, seeing if it simplifies first.  */
7796  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7797
7798  /* There is one exception to the general way of distributing:
7799     (a ^ b) | (a ^ c) -> (~a) & (b ^ c)  */
7800  if (code == XOR && inner_code == IOR)
7801    {
7802      inner_code = AND;
7803      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7804    }
7805
7806  /* We may be able to continuing distributing the result, so call
7807     ourselves recursively on the inner operation before forming the
7808     outer operation, which we return.  */
7809  return gen_binary (inner_code, GET_MODE (x),
7810		     apply_distributive_law (tem), other);
7811}
7812
7813/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7814   in MODE.
7815
7816   Return an equivalent form, if different from X.  Otherwise, return X.  If
7817   X is zero, we are to always construct the equivalent form.  */
7818
7819static rtx
7820simplify_and_const_int (x, mode, varop, constop)
7821     rtx x;
7822     enum machine_mode mode;
7823     rtx varop;
7824     unsigned HOST_WIDE_INT constop;
7825{
7826  unsigned HOST_WIDE_INT nonzero;
7827  int i;
7828
7829  /* Simplify VAROP knowing that we will be only looking at some of the
7830     bits in it.
7831
7832     Note by passing in CONSTOP, we guarantee that the bits not set in
7833     CONSTOP are not significant and will never be examined.  We must
7834     ensure that is the case by explicitly masking out those bits
7835     before returning.  */
7836  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7837
7838  /* If VAROP is a CLOBBER, we will fail so return it.  */
7839  if (GET_CODE (varop) == CLOBBER)
7840    return varop;
7841
7842  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7843     to VAROP and return the new constant.  */
7844  if (GET_CODE (varop) == CONST_INT)
7845    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7846
7847  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7848     a call to nonzero_bits, here we don't care about bits outside
7849     MODE.  */
7850
7851  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7852
7853  /* Turn off all bits in the constant that are known to already be zero.
7854     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7855     which is tested below.  */
7856
7857  constop &= nonzero;
7858
7859  /* If we don't have any bits left, return zero.  */
7860  if (constop == 0)
7861    return const0_rtx;
7862
7863  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7864     a power of two, we can replace this with a ASHIFT.  */
7865  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7866      && (i = exact_log2 (constop)) >= 0)
7867    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7868
7869  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7870     or XOR, then try to apply the distributive law.  This may eliminate
7871     operations if either branch can be simplified because of the AND.
7872     It may also make some cases more complex, but those cases probably
7873     won't match a pattern either with or without this.  */
7874
7875  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7876    return
7877      gen_lowpart_for_combine
7878	(mode,
7879	 apply_distributive_law
7880	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7881		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7882					      XEXP (varop, 0), constop),
7883		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7884					      XEXP (varop, 1), constop))));
7885
7886  /* If VAROP is PLUS, and the constant is a mask of low bite, distribute
7887     the AND and see if one of the operands simplifies to zero.  If so, we
7888     may eliminate it.  */
7889
7890  if (GET_CODE (varop) == PLUS
7891      && exact_log2 (constop + 1) >= 0)
7892    {
7893      rtx o0, o1;
7894
7895      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7896      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7897      if (o0 == const0_rtx)
7898	return o1;
7899      if (o1 == const0_rtx)
7900	return o0;
7901    }
7902
7903  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7904     if we already had one (just check for the simplest cases).  */
7905  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7906      && GET_MODE (XEXP (x, 0)) == mode
7907      && SUBREG_REG (XEXP (x, 0)) == varop)
7908    varop = XEXP (x, 0);
7909  else
7910    varop = gen_lowpart_for_combine (mode, varop);
7911
7912  /* If we can't make the SUBREG, try to return what we were given.  */
7913  if (GET_CODE (varop) == CLOBBER)
7914    return x ? x : varop;
7915
7916  /* If we are only masking insignificant bits, return VAROP.  */
7917  if (constop == nonzero)
7918    x = varop;
7919  else
7920    {
7921      /* Otherwise, return an AND.  */
7922      constop = trunc_int_for_mode (constop, mode);
7923      /* See how much, if any, of X we can use.  */
7924      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7925	x = gen_binary (AND, mode, varop, GEN_INT (constop));
7926
7927      else
7928	{
7929	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
7930	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7931	    SUBST (XEXP (x, 1), GEN_INT (constop));
7932
7933	  SUBST (XEXP (x, 0), varop);
7934	}
7935    }
7936
7937  return x;
7938}
7939
7940/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7941   We don't let nonzero_bits recur into num_sign_bit_copies, because that
7942   is less useful.  We can't allow both, because that results in exponential
7943   run time recursion.  There is a nullstone testcase that triggered
7944   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
7945#define num_sign_bit_copies()
7946
7947/* Given an expression, X, compute which bits in X can be non-zero.
7948   We don't care about bits outside of those defined in MODE.
7949
7950   For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7951   a shift, AND, or zero_extract, we can do better.  */
7952
7953static unsigned HOST_WIDE_INT
7954nonzero_bits (x, mode)
7955     rtx x;
7956     enum machine_mode mode;
7957{
7958  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7959  unsigned HOST_WIDE_INT inner_nz;
7960  enum rtx_code code;
7961  unsigned int mode_width = GET_MODE_BITSIZE (mode);
7962  rtx tem;
7963
7964  /* For floating-point values, assume all bits are needed.  */
7965  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7966    return nonzero;
7967
7968  /* If X is wider than MODE, use its mode instead.  */
7969  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7970    {
7971      mode = GET_MODE (x);
7972      nonzero = GET_MODE_MASK (mode);
7973      mode_width = GET_MODE_BITSIZE (mode);
7974    }
7975
7976  if (mode_width > HOST_BITS_PER_WIDE_INT)
7977    /* Our only callers in this case look for single bit values.  So
7978       just return the mode mask.  Those tests will then be false.  */
7979    return nonzero;
7980
7981#ifndef WORD_REGISTER_OPERATIONS
7982  /* If MODE is wider than X, but both are a single word for both the host
7983     and target machines, we can compute this from which bits of the
7984     object might be nonzero in its own mode, taking into account the fact
7985     that on many CISC machines, accessing an object in a wider mode
7986     causes the high-order bits to become undefined.  So they are
7987     not known to be zero.  */
7988
7989  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7990      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7991      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7992      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7993    {
7994      nonzero &= nonzero_bits (x, GET_MODE (x));
7995      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
7996      return nonzero;
7997    }
7998#endif
7999
8000  code = GET_CODE (x);
8001  switch (code)
8002    {
8003    case REG:
8004#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8005      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8006	 all the bits above ptr_mode are known to be zero.  */
8007      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8008	  && REG_POINTER (x))
8009	nonzero &= GET_MODE_MASK (ptr_mode);
8010#endif
8011
8012      /* Include declared information about alignment of pointers.  */
8013      /* ??? We don't properly preserve REG_POINTER changes across
8014	 pointer-to-integer casts, so we can't trust it except for
8015	 things that we know must be pointers.  See execute/960116-1.c.  */
8016      if ((x == stack_pointer_rtx
8017	   || x == frame_pointer_rtx
8018	   || x == arg_pointer_rtx)
8019	  && REGNO_POINTER_ALIGN (REGNO (x)))
8020	{
8021	  unsigned HOST_WIDE_INT alignment
8022	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8023
8024#ifdef PUSH_ROUNDING
8025	  /* If PUSH_ROUNDING is defined, it is possible for the
8026	     stack to be momentarily aligned only to that amount,
8027	     so we pick the least alignment.  */
8028	  if (x == stack_pointer_rtx && PUSH_ARGS)
8029	    alignment = MIN (PUSH_ROUNDING (1), alignment);
8030#endif
8031
8032	  nonzero &= ~(alignment - 1);
8033	}
8034
8035      /* If X is a register whose nonzero bits value is current, use it.
8036	 Otherwise, if X is a register whose value we can find, use that
8037	 value.  Otherwise, use the previously-computed global nonzero bits
8038	 for this register.  */
8039
8040      if (reg_last_set_value[REGNO (x)] != 0
8041	  && (reg_last_set_mode[REGNO (x)] == mode
8042	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8043		  && GET_MODE_CLASS (mode) == MODE_INT))
8044	  && (reg_last_set_label[REGNO (x)] == label_tick
8045	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8046		  && REG_N_SETS (REGNO (x)) == 1
8047		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8048					REGNO (x))))
8049	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8050	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8051
8052      tem = get_last_value (x);
8053
8054      if (tem)
8055	{
8056#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8057	  /* If X is narrower than MODE and TEM is a non-negative
8058	     constant that would appear negative in the mode of X,
8059	     sign-extend it for use in reg_nonzero_bits because some
8060	     machines (maybe most) will actually do the sign-extension
8061	     and this is the conservative approach.
8062
8063	     ??? For 2.5, try to tighten up the MD files in this regard
8064	     instead of this kludge.  */
8065
8066	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8067	      && GET_CODE (tem) == CONST_INT
8068	      && INTVAL (tem) > 0
8069	      && 0 != (INTVAL (tem)
8070		       & ((HOST_WIDE_INT) 1
8071			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8072	    tem = GEN_INT (INTVAL (tem)
8073			   | ((HOST_WIDE_INT) (-1)
8074			      << GET_MODE_BITSIZE (GET_MODE (x))));
8075#endif
8076	  return nonzero_bits (tem, mode) & nonzero;
8077	}
8078      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8079	{
8080	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8081
8082	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8083	    /* We don't know anything about the upper bits.  */
8084	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8085	  return nonzero & mask;
8086	}
8087      else
8088	return nonzero;
8089
8090    case CONST_INT:
8091#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8092      /* If X is negative in MODE, sign-extend the value.  */
8093      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8094	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8095	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8096#endif
8097
8098      return INTVAL (x);
8099
8100    case MEM:
8101#ifdef LOAD_EXTEND_OP
8102      /* In many, if not most, RISC machines, reading a byte from memory
8103	 zeros the rest of the register.  Noticing that fact saves a lot
8104	 of extra zero-extends.  */
8105      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8106	nonzero &= GET_MODE_MASK (GET_MODE (x));
8107#endif
8108      break;
8109
8110    case EQ:  case NE:
8111    case UNEQ:  case LTGT:
8112    case GT:  case GTU:  case UNGT:
8113    case LT:  case LTU:  case UNLT:
8114    case GE:  case GEU:  case UNGE:
8115    case LE:  case LEU:  case UNLE:
8116    case UNORDERED: case ORDERED:
8117
8118      /* If this produces an integer result, we know which bits are set.
8119	 Code here used to clear bits outside the mode of X, but that is
8120	 now done above.  */
8121
8122      if (GET_MODE_CLASS (mode) == MODE_INT
8123	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8124	nonzero = STORE_FLAG_VALUE;
8125      break;
8126
8127    case NEG:
8128#if 0
8129      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8130	 and num_sign_bit_copies.  */
8131      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8132	  == GET_MODE_BITSIZE (GET_MODE (x)))
8133	nonzero = 1;
8134#endif
8135
8136      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
8137	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8138      break;
8139
8140    case ABS:
8141#if 0
8142      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8143	 and num_sign_bit_copies.  */
8144      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8145	  == GET_MODE_BITSIZE (GET_MODE (x)))
8146	nonzero = 1;
8147#endif
8148      break;
8149
8150    case TRUNCATE:
8151      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
8152      break;
8153
8154    case ZERO_EXTEND:
8155      nonzero &= nonzero_bits (XEXP (x, 0), mode);
8156      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8157	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8158      break;
8159
8160    case SIGN_EXTEND:
8161      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8162	 Otherwise, show all the bits in the outer mode but not the inner
8163	 may be non-zero.  */
8164      inner_nz = nonzero_bits (XEXP (x, 0), mode);
8165      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8166	{
8167	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8168	  if (inner_nz
8169	      & (((HOST_WIDE_INT) 1
8170		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8171	    inner_nz |= (GET_MODE_MASK (mode)
8172			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8173	}
8174
8175      nonzero &= inner_nz;
8176      break;
8177
8178    case AND:
8179      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8180		  & nonzero_bits (XEXP (x, 1), mode));
8181      break;
8182
8183    case XOR:   case IOR:
8184    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8185      {
8186	unsigned HOST_WIDE_INT nonzero0 = nonzero_bits (XEXP (x, 0), mode);
8187
8188	/* Don't call nonzero_bits for the second time if it cannot change
8189	   anything.  */
8190	if ((nonzero & nonzero0) != nonzero)
8191	  nonzero &= (nonzero0 | nonzero_bits (XEXP (x, 1), mode));
8192      }
8193      break;
8194
8195    case PLUS:  case MINUS:
8196    case MULT:
8197    case DIV:   case UDIV:
8198    case MOD:   case UMOD:
8199      /* We can apply the rules of arithmetic to compute the number of
8200	 high- and low-order zero bits of these operations.  We start by
8201	 computing the width (position of the highest-order non-zero bit)
8202	 and the number of low-order zero bits for each value.  */
8203      {
8204	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8205	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8206	int width0 = floor_log2 (nz0) + 1;
8207	int width1 = floor_log2 (nz1) + 1;
8208	int low0 = floor_log2 (nz0 & -nz0);
8209	int low1 = floor_log2 (nz1 & -nz1);
8210	HOST_WIDE_INT op0_maybe_minusp
8211	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8212	HOST_WIDE_INT op1_maybe_minusp
8213	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8214	unsigned int result_width = mode_width;
8215	int result_low = 0;
8216
8217	switch (code)
8218	  {
8219	  case PLUS:
8220	    result_width = MAX (width0, width1) + 1;
8221	    result_low = MIN (low0, low1);
8222	    break;
8223	  case MINUS:
8224	    result_low = MIN (low0, low1);
8225	    break;
8226	  case MULT:
8227	    result_width = width0 + width1;
8228	    result_low = low0 + low1;
8229	    break;
8230	  case DIV:
8231	    if (width1 == 0)
8232	      break;
8233	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8234	      result_width = width0;
8235	    break;
8236	  case UDIV:
8237	    if (width1 == 0)
8238	      break;
8239	    result_width = width0;
8240	    break;
8241	  case MOD:
8242	    if (width1 == 0)
8243	      break;
8244	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8245	      result_width = MIN (width0, width1);
8246	    result_low = MIN (low0, low1);
8247	    break;
8248	  case UMOD:
8249	    if (width1 == 0)
8250	      break;
8251	    result_width = MIN (width0, width1);
8252	    result_low = MIN (low0, low1);
8253	    break;
8254	  default:
8255	    abort ();
8256	  }
8257
8258	if (result_width < mode_width)
8259	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8260
8261	if (result_low > 0)
8262	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8263
8264#ifdef POINTERS_EXTEND_UNSIGNED
8265	/* If pointers extend unsigned and this is an addition or subtraction
8266	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8267	   zero.  */
8268	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8269	    && (code == PLUS || code == MINUS)
8270	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8271	  nonzero &= GET_MODE_MASK (ptr_mode);
8272#endif
8273      }
8274      break;
8275
8276    case ZERO_EXTRACT:
8277      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8278	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8279	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8280      break;
8281
8282    case SUBREG:
8283      /* If this is a SUBREG formed for a promoted variable that has
8284	 been zero-extended, we know that at least the high-order bits
8285	 are zero, though others might be too.  */
8286
8287      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
8288	nonzero = (GET_MODE_MASK (GET_MODE (x))
8289		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8290
8291      /* If the inner mode is a single word for both the host and target
8292	 machines, we can compute this from which bits of the inner
8293	 object might be nonzero.  */
8294      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8295	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8296	      <= HOST_BITS_PER_WIDE_INT))
8297	{
8298	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8299
8300#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8301	  /* If this is a typical RISC machine, we only have to worry
8302	     about the way loads are extended.  */
8303	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8304	      ? (((nonzero
8305		   & (((unsigned HOST_WIDE_INT) 1
8306		       << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8307		  != 0))
8308	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8309#endif
8310	    {
8311	      /* On many CISC machines, accessing an object in a wider mode
8312		 causes the high-order bits to become undefined.  So they are
8313		 not known to be zero.  */
8314	      if (GET_MODE_SIZE (GET_MODE (x))
8315		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8316		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8317			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8318	    }
8319	}
8320      break;
8321
8322    case ASHIFTRT:
8323    case LSHIFTRT:
8324    case ASHIFT:
8325    case ROTATE:
8326      /* The nonzero bits are in two classes: any bits within MODE
8327	 that aren't in GET_MODE (x) are always significant.  The rest of the
8328	 nonzero bits are those that are significant in the operand of
8329	 the shift when shifted the appropriate number of bits.  This
8330	 shows that high-order bits are cleared by the right shift and
8331	 low-order bits by left shifts.  */
8332      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8333	  && INTVAL (XEXP (x, 1)) >= 0
8334	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8335	{
8336	  enum machine_mode inner_mode = GET_MODE (x);
8337	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8338	  int count = INTVAL (XEXP (x, 1));
8339	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8340	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8341	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8342	  unsigned HOST_WIDE_INT outer = 0;
8343
8344	  if (mode_width > width)
8345	    outer = (op_nonzero & nonzero & ~mode_mask);
8346
8347	  if (code == LSHIFTRT)
8348	    inner >>= count;
8349	  else if (code == ASHIFTRT)
8350	    {
8351	      inner >>= count;
8352
8353	      /* If the sign bit may have been nonzero before the shift, we
8354		 need to mark all the places it could have been copied to
8355		 by the shift as possibly nonzero.  */
8356	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8357		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8358	    }
8359	  else if (code == ASHIFT)
8360	    inner <<= count;
8361	  else
8362	    inner = ((inner << (count % width)
8363		      | (inner >> (width - (count % width)))) & mode_mask);
8364
8365	  nonzero &= (outer | inner);
8366	}
8367      break;
8368
8369    case FFS:
8370      /* This is at most the number of bits in the mode.  */
8371      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8372      break;
8373
8374    case IF_THEN_ELSE:
8375      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8376		  | nonzero_bits (XEXP (x, 2), mode));
8377      break;
8378
8379    default:
8380      break;
8381    }
8382
8383  return nonzero;
8384}
8385
8386/* See the macro definition above.  */
8387#undef num_sign_bit_copies
8388
8389/* Return the number of bits at the high-order end of X that are known to
8390   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8391   VOIDmode, X will be used in its own mode.  The returned value  will always
8392   be between 1 and the number of bits in MODE.  */
8393
8394static unsigned int
8395num_sign_bit_copies (x, mode)
8396     rtx x;
8397     enum machine_mode mode;
8398{
8399  enum rtx_code code = GET_CODE (x);
8400  unsigned int bitwidth;
8401  int num0, num1, result;
8402  unsigned HOST_WIDE_INT nonzero;
8403  rtx tem;
8404
8405  /* If we weren't given a mode, use the mode of X.  If the mode is still
8406     VOIDmode, we don't know anything.  Likewise if one of the modes is
8407     floating-point.  */
8408
8409  if (mode == VOIDmode)
8410    mode = GET_MODE (x);
8411
8412  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8413    return 1;
8414
8415  bitwidth = GET_MODE_BITSIZE (mode);
8416
8417  /* For a smaller object, just ignore the high bits.  */
8418  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8419    {
8420      num0 = num_sign_bit_copies (x, GET_MODE (x));
8421      return MAX (1,
8422		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8423    }
8424
8425  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8426    {
8427#ifndef WORD_REGISTER_OPERATIONS
8428  /* If this machine does not do all register operations on the entire
8429     register and MODE is wider than the mode of X, we can say nothing
8430     at all about the high-order bits.  */
8431      return 1;
8432#else
8433      /* Likewise on machines that do, if the mode of the object is smaller
8434	 than a word and loads of that size don't sign extend, we can say
8435	 nothing about the high order bits.  */
8436      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8437#ifdef LOAD_EXTEND_OP
8438	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8439#endif
8440	  )
8441	return 1;
8442#endif
8443    }
8444
8445  switch (code)
8446    {
8447    case REG:
8448
8449#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8450      /* If pointers extend signed and this is a pointer in Pmode, say that
8451	 all the bits above ptr_mode are known to be sign bit copies.  */
8452      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8453	  && REG_POINTER (x))
8454	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8455#endif
8456
8457      if (reg_last_set_value[REGNO (x)] != 0
8458	  && reg_last_set_mode[REGNO (x)] == mode
8459	  && (reg_last_set_label[REGNO (x)] == label_tick
8460	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8461		  && REG_N_SETS (REGNO (x)) == 1
8462		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8463					REGNO (x))))
8464	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8465	return reg_last_set_sign_bit_copies[REGNO (x)];
8466
8467      tem = get_last_value (x);
8468      if (tem != 0)
8469	return num_sign_bit_copies (tem, mode);
8470
8471      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8472	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8473	return reg_sign_bit_copies[REGNO (x)];
8474      break;
8475
8476    case MEM:
8477#ifdef LOAD_EXTEND_OP
8478      /* Some RISC machines sign-extend all loads of smaller than a word.  */
8479      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8480	return MAX (1, ((int) bitwidth
8481			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8482#endif
8483      break;
8484
8485    case CONST_INT:
8486      /* If the constant is negative, take its 1's complement and remask.
8487	 Then see how many zero bits we have.  */
8488      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8489      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8490	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8491	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8492
8493      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8494
8495    case SUBREG:
8496      /* If this is a SUBREG for a promoted object that is sign-extended
8497	 and we are looking at it in a wider mode, we know that at least the
8498	 high-order bits are known to be sign bit copies.  */
8499
8500      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8501	{
8502	  num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8503	  return MAX ((int) bitwidth
8504		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8505		      num0);
8506	}
8507
8508      /* For a smaller object, just ignore the high bits.  */
8509      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8510	{
8511	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8512	  return MAX (1, (num0
8513			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8514				   - bitwidth)));
8515	}
8516
8517#ifdef WORD_REGISTER_OPERATIONS
8518#ifdef LOAD_EXTEND_OP
8519      /* For paradoxical SUBREGs on machines where all register operations
8520	 affect the entire register, just look inside.  Note that we are
8521	 passing MODE to the recursive call, so the number of sign bit copies
8522	 will remain relative to that mode, not the inner mode.  */
8523
8524      /* This works only if loads sign extend.  Otherwise, if we get a
8525	 reload for the inner part, it may be loaded from the stack, and
8526	 then we lose all sign bit copies that existed before the store
8527	 to the stack.  */
8528
8529      if ((GET_MODE_SIZE (GET_MODE (x))
8530	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8531	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
8532	return num_sign_bit_copies (SUBREG_REG (x), mode);
8533#endif
8534#endif
8535      break;
8536
8537    case SIGN_EXTRACT:
8538      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8539	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8540      break;
8541
8542    case SIGN_EXTEND:
8543      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8544	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8545
8546    case TRUNCATE:
8547      /* For a smaller object, just ignore the high bits.  */
8548      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8549      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8550				    - bitwidth)));
8551
8552    case NOT:
8553      return num_sign_bit_copies (XEXP (x, 0), mode);
8554
8555    case ROTATE:       case ROTATERT:
8556      /* If we are rotating left by a number of bits less than the number
8557	 of sign bit copies, we can just subtract that amount from the
8558	 number.  */
8559      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8560	  && INTVAL (XEXP (x, 1)) >= 0
8561	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8562	{
8563	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8564	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8565				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8566	}
8567      break;
8568
8569    case NEG:
8570      /* In general, this subtracts one sign bit copy.  But if the value
8571	 is known to be positive, the number of sign bit copies is the
8572	 same as that of the input.  Finally, if the input has just one bit
8573	 that might be nonzero, all the bits are copies of the sign bit.  */
8574      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8575      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8576	return num0 > 1 ? num0 - 1 : 1;
8577
8578      nonzero = nonzero_bits (XEXP (x, 0), mode);
8579      if (nonzero == 1)
8580	return bitwidth;
8581
8582      if (num0 > 1
8583	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8584	num0--;
8585
8586      return num0;
8587
8588    case IOR:   case AND:   case XOR:
8589    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8590      /* Logical operations will preserve the number of sign-bit copies.
8591	 MIN and MAX operations always return one of the operands.  */
8592      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8593      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8594      return MIN (num0, num1);
8595
8596    case PLUS:  case MINUS:
8597      /* For addition and subtraction, we can have a 1-bit carry.  However,
8598	 if we are subtracting 1 from a positive number, there will not
8599	 be such a carry.  Furthermore, if the positive number is known to
8600	 be 0 or 1, we know the result is either -1 or 0.  */
8601
8602      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8603	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8604	{
8605	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8606	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8607	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8608		    : bitwidth - floor_log2 (nonzero) - 1);
8609	}
8610
8611      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8612      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8613      result = MAX (1, MIN (num0, num1) - 1);
8614
8615#ifdef POINTERS_EXTEND_UNSIGNED
8616      /* If pointers extend signed and this is an addition or subtraction
8617	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8618	 sign bit copies.  */
8619      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8620	  && (code == PLUS || code == MINUS)
8621	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8622	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8623			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8624		      result);
8625#endif
8626      return result;
8627
8628    case MULT:
8629      /* The number of bits of the product is the sum of the number of
8630	 bits of both terms.  However, unless one of the terms if known
8631	 to be positive, we must allow for an additional bit since negating
8632	 a negative number can remove one sign bit copy.  */
8633
8634      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8635      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8636
8637      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8638      if (result > 0
8639	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8640	      || (((nonzero_bits (XEXP (x, 0), mode)
8641		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8642		  && ((nonzero_bits (XEXP (x, 1), mode)
8643		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8644	result--;
8645
8646      return MAX (1, result);
8647
8648    case UDIV:
8649      /* The result must be <= the first operand.  If the first operand
8650         has the high bit set, we know nothing about the number of sign
8651         bit copies.  */
8652      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8653	return 1;
8654      else if ((nonzero_bits (XEXP (x, 0), mode)
8655		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8656	return 1;
8657      else
8658	return num_sign_bit_copies (XEXP (x, 0), mode);
8659
8660    case UMOD:
8661      /* The result must be <= the second operand.  */
8662      return num_sign_bit_copies (XEXP (x, 1), mode);
8663
8664    case DIV:
8665      /* Similar to unsigned division, except that we have to worry about
8666	 the case where the divisor is negative, in which case we have
8667	 to add 1.  */
8668      result = num_sign_bit_copies (XEXP (x, 0), mode);
8669      if (result > 1
8670	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8671	      || (nonzero_bits (XEXP (x, 1), mode)
8672		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8673	result--;
8674
8675      return result;
8676
8677    case MOD:
8678      result = num_sign_bit_copies (XEXP (x, 1), mode);
8679      if (result > 1
8680	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8681	      || (nonzero_bits (XEXP (x, 1), mode)
8682		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8683	result--;
8684
8685      return result;
8686
8687    case ASHIFTRT:
8688      /* Shifts by a constant add to the number of bits equal to the
8689	 sign bit.  */
8690      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8691      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8692	  && INTVAL (XEXP (x, 1)) > 0)
8693	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8694
8695      return num0;
8696
8697    case ASHIFT:
8698      /* Left shifts destroy copies.  */
8699      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8700	  || INTVAL (XEXP (x, 1)) < 0
8701	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8702	return 1;
8703
8704      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8705      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8706
8707    case IF_THEN_ELSE:
8708      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8709      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8710      return MIN (num0, num1);
8711
8712    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8713    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
8714    case GEU: case GTU: case LEU: case LTU:
8715    case UNORDERED: case ORDERED:
8716      /* If the constant is negative, take its 1's complement and remask.
8717	 Then see how many zero bits we have.  */
8718      nonzero = STORE_FLAG_VALUE;
8719      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8720	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8721	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8722
8723      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8724      break;
8725
8726    default:
8727      break;
8728    }
8729
8730  /* If we haven't been able to figure it out by one of the above rules,
8731     see if some of the high-order bits are known to be zero.  If so,
8732     count those bits and return one less than that amount.  If we can't
8733     safely compute the mask for this mode, always return BITWIDTH.  */
8734
8735  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8736    return 1;
8737
8738  nonzero = nonzero_bits (x, mode);
8739  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8740	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8741}
8742
8743/* Return the number of "extended" bits there are in X, when interpreted
8744   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8745   unsigned quantities, this is the number of high-order zero bits.
8746   For signed quantities, this is the number of copies of the sign bit
8747   minus 1.  In both case, this function returns the number of "spare"
8748   bits.  For example, if two quantities for which this function returns
8749   at least 1 are added, the addition is known not to overflow.
8750
8751   This function will always return 0 unless called during combine, which
8752   implies that it must be called from a define_split.  */
8753
8754unsigned int
8755extended_count (x, mode, unsignedp)
8756     rtx x;
8757     enum machine_mode mode;
8758     int unsignedp;
8759{
8760  if (nonzero_sign_valid == 0)
8761    return 0;
8762
8763  return (unsignedp
8764	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8765	     ? (GET_MODE_BITSIZE (mode) - 1
8766		- floor_log2 (nonzero_bits (x, mode)))
8767	     : 0)
8768	  : num_sign_bit_copies (x, mode) - 1);
8769}
8770
8771/* This function is called from `simplify_shift_const' to merge two
8772   outer operations.  Specifically, we have already found that we need
8773   to perform operation *POP0 with constant *PCONST0 at the outermost
8774   position.  We would now like to also perform OP1 with constant CONST1
8775   (with *POP0 being done last).
8776
8777   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8778   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8779   complement the innermost operand, otherwise it is unchanged.
8780
8781   MODE is the mode in which the operation will be done.  No bits outside
8782   the width of this mode matter.  It is assumed that the width of this mode
8783   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8784
8785   If *POP0 or OP1 are NIL, it means no operation is required.  Only NEG, PLUS,
8786   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8787   result is simply *PCONST0.
8788
8789   If the resulting operation cannot be expressed as one operation, we
8790   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
8791
8792static int
8793merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8794     enum rtx_code *pop0;
8795     HOST_WIDE_INT *pconst0;
8796     enum rtx_code op1;
8797     HOST_WIDE_INT const1;
8798     enum machine_mode mode;
8799     int *pcomp_p;
8800{
8801  enum rtx_code op0 = *pop0;
8802  HOST_WIDE_INT const0 = *pconst0;
8803
8804  const0 &= GET_MODE_MASK (mode);
8805  const1 &= GET_MODE_MASK (mode);
8806
8807  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8808  if (op0 == AND)
8809    const1 &= const0;
8810
8811  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
8812     if OP0 is SET.  */
8813
8814  if (op1 == NIL || op0 == SET)
8815    return 1;
8816
8817  else if (op0 == NIL)
8818    op0 = op1, const0 = const1;
8819
8820  else if (op0 == op1)
8821    {
8822      switch (op0)
8823	{
8824	case AND:
8825	  const0 &= const1;
8826	  break;
8827	case IOR:
8828	  const0 |= const1;
8829	  break;
8830	case XOR:
8831	  const0 ^= const1;
8832	  break;
8833	case PLUS:
8834	  const0 += const1;
8835	  break;
8836	case NEG:
8837	  op0 = NIL;
8838	  break;
8839	default:
8840	  break;
8841	}
8842    }
8843
8844  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8845  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8846    return 0;
8847
8848  /* If the two constants aren't the same, we can't do anything.  The
8849     remaining six cases can all be done.  */
8850  else if (const0 != const1)
8851    return 0;
8852
8853  else
8854    switch (op0)
8855      {
8856      case IOR:
8857	if (op1 == AND)
8858	  /* (a & b) | b == b */
8859	  op0 = SET;
8860	else /* op1 == XOR */
8861	  /* (a ^ b) | b == a | b */
8862	  {;}
8863	break;
8864
8865      case XOR:
8866	if (op1 == AND)
8867	  /* (a & b) ^ b == (~a) & b */
8868	  op0 = AND, *pcomp_p = 1;
8869	else /* op1 == IOR */
8870	  /* (a | b) ^ b == a & ~b */
8871	  op0 = AND, *pconst0 = ~const0;
8872	break;
8873
8874      case AND:
8875	if (op1 == IOR)
8876	  /* (a | b) & b == b */
8877	op0 = SET;
8878	else /* op1 == XOR */
8879	  /* (a ^ b) & b) == (~a) & b */
8880	  *pcomp_p = 1;
8881	break;
8882      default:
8883	break;
8884      }
8885
8886  /* Check for NO-OP cases.  */
8887  const0 &= GET_MODE_MASK (mode);
8888  if (const0 == 0
8889      && (op0 == IOR || op0 == XOR || op0 == PLUS))
8890    op0 = NIL;
8891  else if (const0 == 0 && op0 == AND)
8892    op0 = SET;
8893  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8894	   && op0 == AND)
8895    op0 = NIL;
8896
8897  /* ??? Slightly redundant with the above mask, but not entirely.
8898     Moving this above means we'd have to sign-extend the mode mask
8899     for the final test.  */
8900  const0 = trunc_int_for_mode (const0, mode);
8901
8902  *pop0 = op0;
8903  *pconst0 = const0;
8904
8905  return 1;
8906}
8907
8908/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8909   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
8910   that we started with.
8911
8912   The shift is normally computed in the widest mode we find in VAROP, as
8913   long as it isn't a different number of words than RESULT_MODE.  Exceptions
8914   are ASHIFTRT and ROTATE, which are always done in their original mode,  */
8915
8916static rtx
8917simplify_shift_const (x, code, result_mode, varop, orig_count)
8918     rtx x;
8919     enum rtx_code code;
8920     enum machine_mode result_mode;
8921     rtx varop;
8922     int orig_count;
8923{
8924  enum rtx_code orig_code = code;
8925  unsigned int count;
8926  int signed_count;
8927  enum machine_mode mode = result_mode;
8928  enum machine_mode shift_mode, tmode;
8929  unsigned int mode_words
8930    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8931  /* We form (outer_op (code varop count) (outer_const)).  */
8932  enum rtx_code outer_op = NIL;
8933  HOST_WIDE_INT outer_const = 0;
8934  rtx const_rtx;
8935  int complement_p = 0;
8936  rtx new;
8937
8938  /* Make sure and truncate the "natural" shift on the way in.  We don't
8939     want to do this inside the loop as it makes it more difficult to
8940     combine shifts.  */
8941#ifdef SHIFT_COUNT_TRUNCATED
8942  if (SHIFT_COUNT_TRUNCATED)
8943    orig_count &= GET_MODE_BITSIZE (mode) - 1;
8944#endif
8945
8946  /* If we were given an invalid count, don't do anything except exactly
8947     what was requested.  */
8948
8949  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8950    {
8951      if (x)
8952	return x;
8953
8954      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
8955    }
8956
8957  count = orig_count;
8958
8959  /* Unless one of the branches of the `if' in this loop does a `continue',
8960     we will `break' the loop after the `if'.  */
8961
8962  while (count != 0)
8963    {
8964      /* If we have an operand of (clobber (const_int 0)), just return that
8965	 value.  */
8966      if (GET_CODE (varop) == CLOBBER)
8967	return varop;
8968
8969      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8970	 here would cause an infinite loop.  */
8971      if (complement_p)
8972	break;
8973
8974      /* Convert ROTATERT to ROTATE.  */
8975      if (code == ROTATERT)
8976	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8977
8978      /* We need to determine what mode we will do the shift in.  If the
8979	 shift is a right shift or a ROTATE, we must always do it in the mode
8980	 it was originally done in.  Otherwise, we can do it in MODE, the
8981	 widest mode encountered.  */
8982      shift_mode
8983	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8984	   ? result_mode : mode);
8985
8986      /* Handle cases where the count is greater than the size of the mode
8987	 minus 1.  For ASHIFT, use the size minus one as the count (this can
8988	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8989	 take the count modulo the size.  For other shifts, the result is
8990	 zero.
8991
8992	 Since these shifts are being produced by the compiler by combining
8993	 multiple operations, each of which are defined, we know what the
8994	 result is supposed to be.  */
8995
8996      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8997	{
8998	  if (code == ASHIFTRT)
8999	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9000	  else if (code == ROTATE || code == ROTATERT)
9001	    count %= GET_MODE_BITSIZE (shift_mode);
9002	  else
9003	    {
9004	      /* We can't simply return zero because there may be an
9005		 outer op.  */
9006	      varop = const0_rtx;
9007	      count = 0;
9008	      break;
9009	    }
9010	}
9011
9012      /* An arithmetic right shift of a quantity known to be -1 or 0
9013	 is a no-op.  */
9014      if (code == ASHIFTRT
9015	  && (num_sign_bit_copies (varop, shift_mode)
9016	      == GET_MODE_BITSIZE (shift_mode)))
9017	{
9018	  count = 0;
9019	  break;
9020	}
9021
9022      /* If we are doing an arithmetic right shift and discarding all but
9023	 the sign bit copies, this is equivalent to doing a shift by the
9024	 bitsize minus one.  Convert it into that shift because it will often
9025	 allow other simplifications.  */
9026
9027      if (code == ASHIFTRT
9028	  && (count + num_sign_bit_copies (varop, shift_mode)
9029	      >= GET_MODE_BITSIZE (shift_mode)))
9030	count = GET_MODE_BITSIZE (shift_mode) - 1;
9031
9032      /* We simplify the tests below and elsewhere by converting
9033	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9034	 `make_compound_operation' will convert it to a ASHIFTRT for
9035	 those machines (such as VAX) that don't have a LSHIFTRT.  */
9036      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9037	  && code == ASHIFTRT
9038	  && ((nonzero_bits (varop, shift_mode)
9039	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9040	      == 0))
9041	code = LSHIFTRT;
9042
9043      switch (GET_CODE (varop))
9044	{
9045	case SIGN_EXTEND:
9046	case ZERO_EXTEND:
9047	case SIGN_EXTRACT:
9048	case ZERO_EXTRACT:
9049	  new = expand_compound_operation (varop);
9050	  if (new != varop)
9051	    {
9052	      varop = new;
9053	      continue;
9054	    }
9055	  break;
9056
9057	case MEM:
9058	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9059	     minus the width of a smaller mode, we can do this with a
9060	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9061	  if ((code == ASHIFTRT || code == LSHIFTRT)
9062	      && ! mode_dependent_address_p (XEXP (varop, 0))
9063	      && ! MEM_VOLATILE_P (varop)
9064	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9065					 MODE_INT, 1)) != BLKmode)
9066	    {
9067	      new = adjust_address_nv (varop, tmode,
9068				       BYTES_BIG_ENDIAN ? 0
9069				       : count / BITS_PER_UNIT);
9070
9071	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9072				     : ZERO_EXTEND, mode, new);
9073	      count = 0;
9074	      continue;
9075	    }
9076	  break;
9077
9078	case USE:
9079	  /* Similar to the case above, except that we can only do this if
9080	     the resulting mode is the same as that of the underlying
9081	     MEM and adjust the address depending on the *bits* endianness
9082	     because of the way that bit-field extract insns are defined.  */
9083	  if ((code == ASHIFTRT || code == LSHIFTRT)
9084	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9085					 MODE_INT, 1)) != BLKmode
9086	      && tmode == GET_MODE (XEXP (varop, 0)))
9087	    {
9088	      if (BITS_BIG_ENDIAN)
9089		new = XEXP (varop, 0);
9090	      else
9091		{
9092		  new = copy_rtx (XEXP (varop, 0));
9093		  SUBST (XEXP (new, 0),
9094			 plus_constant (XEXP (new, 0),
9095					count / BITS_PER_UNIT));
9096		}
9097
9098	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9099				     : ZERO_EXTEND, mode, new);
9100	      count = 0;
9101	      continue;
9102	    }
9103	  break;
9104
9105	case SUBREG:
9106	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9107	     the same number of words as what we've seen so far.  Then store
9108	     the widest mode in MODE.  */
9109	  if (subreg_lowpart_p (varop)
9110	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9111		  > GET_MODE_SIZE (GET_MODE (varop)))
9112	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9113		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9114		  == mode_words))
9115	    {
9116	      varop = SUBREG_REG (varop);
9117	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9118		mode = GET_MODE (varop);
9119	      continue;
9120	    }
9121	  break;
9122
9123	case MULT:
9124	  /* Some machines use MULT instead of ASHIFT because MULT
9125	     is cheaper.  But it is still better on those machines to
9126	     merge two shifts into one.  */
9127	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9128	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9129	    {
9130	      varop
9131		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9132			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9133	      continue;
9134	    }
9135	  break;
9136
9137	case UDIV:
9138	  /* Similar, for when divides are cheaper.  */
9139	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9140	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9141	    {
9142	      varop
9143		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9144			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9145	      continue;
9146	    }
9147	  break;
9148
9149	case ASHIFTRT:
9150	  /* If we are extracting just the sign bit of an arithmetic
9151	     right shift, that shift is not needed.  However, the sign
9152	     bit of a wider mode may be different from what would be
9153	     interpreted as the sign bit in a narrower mode, so, if
9154	     the result is narrower, don't discard the shift.  */
9155	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9156	      && (GET_MODE_BITSIZE (result_mode)
9157		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9158	    {
9159	      varop = XEXP (varop, 0);
9160	      continue;
9161	    }
9162
9163	  /* ... fall through ...  */
9164
9165	case LSHIFTRT:
9166	case ASHIFT:
9167	case ROTATE:
9168	  /* Here we have two nested shifts.  The result is usually the
9169	     AND of a new shift with a mask.  We compute the result below.  */
9170	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9171	      && INTVAL (XEXP (varop, 1)) >= 0
9172	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9173	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9174	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9175	    {
9176	      enum rtx_code first_code = GET_CODE (varop);
9177	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9178	      unsigned HOST_WIDE_INT mask;
9179	      rtx mask_rtx;
9180
9181	      /* We have one common special case.  We can't do any merging if
9182		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9183		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9184		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9185		 we can convert it to
9186		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
9187		 This simplifies certain SIGN_EXTEND operations.  */
9188	      if (code == ASHIFT && first_code == ASHIFTRT
9189		  && (GET_MODE_BITSIZE (result_mode)
9190		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9191		{
9192		  /* C3 has the low-order C1 bits zero.  */
9193
9194		  mask = (GET_MODE_MASK (mode)
9195			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9196
9197		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9198						  XEXP (varop, 0), mask);
9199		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9200						varop, count);
9201		  count = first_count;
9202		  code = ASHIFTRT;
9203		  continue;
9204		}
9205
9206	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9207		 than C1 high-order bits equal to the sign bit, we can convert
9208		 this to either an ASHIFT or a ASHIFTRT depending on the
9209		 two counts.
9210
9211		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9212
9213	      if (code == ASHIFTRT && first_code == ASHIFT
9214		  && GET_MODE (varop) == shift_mode
9215		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9216		      > first_count))
9217		{
9218		  varop = XEXP (varop, 0);
9219
9220		  signed_count = count - first_count;
9221		  if (signed_count < 0)
9222		    count = -signed_count, code = ASHIFT;
9223		  else
9224		    count = signed_count;
9225
9226		  continue;
9227		}
9228
9229	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9230		 we can only do this if FIRST_CODE is also ASHIFTRT.
9231
9232		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9233		 ASHIFTRT.
9234
9235		 If the mode of this shift is not the mode of the outer shift,
9236		 we can't do this if either shift is a right shift or ROTATE.
9237
9238		 Finally, we can't do any of these if the mode is too wide
9239		 unless the codes are the same.
9240
9241		 Handle the case where the shift codes are the same
9242		 first.  */
9243
9244	      if (code == first_code)
9245		{
9246		  if (GET_MODE (varop) != result_mode
9247		      && (code == ASHIFTRT || code == LSHIFTRT
9248			  || code == ROTATE))
9249		    break;
9250
9251		  count += first_count;
9252		  varop = XEXP (varop, 0);
9253		  continue;
9254		}
9255
9256	      if (code == ASHIFTRT
9257		  || (code == ROTATE && first_code == ASHIFTRT)
9258		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9259		  || (GET_MODE (varop) != result_mode
9260		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9261			  || first_code == ROTATE
9262			  || code == ROTATE)))
9263		break;
9264
9265	      /* To compute the mask to apply after the shift, shift the
9266		 nonzero bits of the inner shift the same way the
9267		 outer shift will.  */
9268
9269	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9270
9271	      mask_rtx
9272		= simplify_binary_operation (code, result_mode, mask_rtx,
9273					     GEN_INT (count));
9274
9275	      /* Give up if we can't compute an outer operation to use.  */
9276	      if (mask_rtx == 0
9277		  || GET_CODE (mask_rtx) != CONST_INT
9278		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9279					INTVAL (mask_rtx),
9280					result_mode, &complement_p))
9281		break;
9282
9283	      /* If the shifts are in the same direction, we add the
9284		 counts.  Otherwise, we subtract them.  */
9285	      signed_count = count;
9286	      if ((code == ASHIFTRT || code == LSHIFTRT)
9287		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9288		signed_count += first_count;
9289	      else
9290		signed_count -= first_count;
9291
9292	      /* If COUNT is positive, the new shift is usually CODE,
9293		 except for the two exceptions below, in which case it is
9294		 FIRST_CODE.  If the count is negative, FIRST_CODE should
9295		 always be used  */
9296	      if (signed_count > 0
9297		  && ((first_code == ROTATE && code == ASHIFT)
9298		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9299		code = first_code, count = signed_count;
9300	      else if (signed_count < 0)
9301		code = first_code, count = -signed_count;
9302	      else
9303		count = signed_count;
9304
9305	      varop = XEXP (varop, 0);
9306	      continue;
9307	    }
9308
9309	  /* If we have (A << B << C) for any shift, we can convert this to
9310	     (A << C << B).  This wins if A is a constant.  Only try this if
9311	     B is not a constant.  */
9312
9313	  else if (GET_CODE (varop) == code
9314		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9315		   && 0 != (new
9316			    = simplify_binary_operation (code, mode,
9317							 XEXP (varop, 0),
9318							 GEN_INT (count))))
9319	    {
9320	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9321	      count = 0;
9322	      continue;
9323	    }
9324	  break;
9325
9326	case NOT:
9327	  /* Make this fit the case below.  */
9328	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9329			       GEN_INT (GET_MODE_MASK (mode)));
9330	  continue;
9331
9332	case IOR:
9333	case AND:
9334	case XOR:
9335	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9336	     with C the size of VAROP - 1 and the shift is logical if
9337	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9338	     we have an (le X 0) operation.   If we have an arithmetic shift
9339	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9340	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9341
9342	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9343	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9344	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9345	      && (code == LSHIFTRT || code == ASHIFTRT)
9346	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9347	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9348	    {
9349	      count = 0;
9350	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9351				  const0_rtx);
9352
9353	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9354		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9355
9356	      continue;
9357	    }
9358
9359	  /* If we have (shift (logical)), move the logical to the outside
9360	     to allow it to possibly combine with another logical and the
9361	     shift to combine with another shift.  This also canonicalizes to
9362	     what a ZERO_EXTRACT looks like.  Also, some machines have
9363	     (and (shift)) insns.  */
9364
9365	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9366	      && (new = simplify_binary_operation (code, result_mode,
9367						   XEXP (varop, 1),
9368						   GEN_INT (count))) != 0
9369	      && GET_CODE (new) == CONST_INT
9370	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9371				  INTVAL (new), result_mode, &complement_p))
9372	    {
9373	      varop = XEXP (varop, 0);
9374	      continue;
9375	    }
9376
9377	  /* If we can't do that, try to simplify the shift in each arm of the
9378	     logical expression, make a new logical expression, and apply
9379	     the inverse distributive law.  */
9380	  {
9381	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9382					    XEXP (varop, 0), count);
9383	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9384					    XEXP (varop, 1), count);
9385
9386	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9387	    varop = apply_distributive_law (varop);
9388
9389	    count = 0;
9390	  }
9391	  break;
9392
9393	case EQ:
9394	  /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9395	     says that the sign bit can be tested, FOO has mode MODE, C is
9396	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9397	     that may be nonzero.  */
9398	  if (code == LSHIFTRT
9399	      && XEXP (varop, 1) == const0_rtx
9400	      && GET_MODE (XEXP (varop, 0)) == result_mode
9401	      && count == GET_MODE_BITSIZE (result_mode) - 1
9402	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9403	      && ((STORE_FLAG_VALUE
9404		   & ((HOST_WIDE_INT) 1
9405		      < (GET_MODE_BITSIZE (result_mode) - 1))))
9406	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9407	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9408				  (HOST_WIDE_INT) 1, result_mode,
9409				  &complement_p))
9410	    {
9411	      varop = XEXP (varop, 0);
9412	      count = 0;
9413	      continue;
9414	    }
9415	  break;
9416
9417	case NEG:
9418	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9419	     than the number of bits in the mode is equivalent to A.  */
9420	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9421	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9422	    {
9423	      varop = XEXP (varop, 0);
9424	      count = 0;
9425	      continue;
9426	    }
9427
9428	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9429	     NEG outside to allow shifts to combine.  */
9430	  if (code == ASHIFT
9431	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9432				  (HOST_WIDE_INT) 0, result_mode,
9433				  &complement_p))
9434	    {
9435	      varop = XEXP (varop, 0);
9436	      continue;
9437	    }
9438	  break;
9439
9440	case PLUS:
9441	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9442	     is one less than the number of bits in the mode is
9443	     equivalent to (xor A 1).  */
9444	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9445	      && XEXP (varop, 1) == constm1_rtx
9446	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9447	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9448				  (HOST_WIDE_INT) 1, result_mode,
9449				  &complement_p))
9450	    {
9451	      count = 0;
9452	      varop = XEXP (varop, 0);
9453	      continue;
9454	    }
9455
9456	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9457	     that might be nonzero in BAR are those being shifted out and those
9458	     bits are known zero in FOO, we can replace the PLUS with FOO.
9459	     Similarly in the other operand order.  This code occurs when
9460	     we are computing the size of a variable-size array.  */
9461
9462	  if ((code == ASHIFTRT || code == LSHIFTRT)
9463	      && count < HOST_BITS_PER_WIDE_INT
9464	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9465	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9466		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9467	    {
9468	      varop = XEXP (varop, 0);
9469	      continue;
9470	    }
9471	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9472		   && count < HOST_BITS_PER_WIDE_INT
9473		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9474		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9475			    >> count)
9476		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9477			    & nonzero_bits (XEXP (varop, 1),
9478						 result_mode)))
9479	    {
9480	      varop = XEXP (varop, 1);
9481	      continue;
9482	    }
9483
9484	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9485	  if (code == ASHIFT
9486	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9487	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9488						   XEXP (varop, 1),
9489						   GEN_INT (count))) != 0
9490	      && GET_CODE (new) == CONST_INT
9491	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9492				  INTVAL (new), result_mode, &complement_p))
9493	    {
9494	      varop = XEXP (varop, 0);
9495	      continue;
9496	    }
9497	  break;
9498
9499	case MINUS:
9500	  /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9501	     with C the size of VAROP - 1 and the shift is logical if
9502	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9503	     we have a (gt X 0) operation.  If the shift is arithmetic with
9504	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9505	     we have a (neg (gt X 0)) operation.  */
9506
9507	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9508	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9509	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9510	      && (code == LSHIFTRT || code == ASHIFTRT)
9511	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9512	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9513	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9514	    {
9515	      count = 0;
9516	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9517				  const0_rtx);
9518
9519	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9520		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9521
9522	      continue;
9523	    }
9524	  break;
9525
9526	case TRUNCATE:
9527	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9528	     if the truncate does not affect the value.  */
9529	  if (code == LSHIFTRT
9530	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9531	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9532	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9533		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9534		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9535	    {
9536	      rtx varop_inner = XEXP (varop, 0);
9537
9538	      varop_inner
9539		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9540				    XEXP (varop_inner, 0),
9541				    GEN_INT
9542				    (count + INTVAL (XEXP (varop_inner, 1))));
9543	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9544	      count = 0;
9545	      continue;
9546	    }
9547	  break;
9548
9549	default:
9550	  break;
9551	}
9552
9553      break;
9554    }
9555
9556  /* We need to determine what mode to do the shift in.  If the shift is
9557     a right shift or ROTATE, we must always do it in the mode it was
9558     originally done in.  Otherwise, we can do it in MODE, the widest mode
9559     encountered.  The code we care about is that of the shift that will
9560     actually be done, not the shift that was originally requested.  */
9561  shift_mode
9562    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9563       ? result_mode : mode);
9564
9565  /* We have now finished analyzing the shift.  The result should be
9566     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9567     OUTER_OP is non-NIL, it is an operation that needs to be applied
9568     to the result of the shift.  OUTER_CONST is the relevant constant,
9569     but we must turn off all bits turned off in the shift.
9570
9571     If we were passed a value for X, see if we can use any pieces of
9572     it.  If not, make new rtx.  */
9573
9574  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9575      && GET_CODE (XEXP (x, 1)) == CONST_INT
9576      && INTVAL (XEXP (x, 1)) == count)
9577    const_rtx = XEXP (x, 1);
9578  else
9579    const_rtx = GEN_INT (count);
9580
9581  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9582      && GET_MODE (XEXP (x, 0)) == shift_mode
9583      && SUBREG_REG (XEXP (x, 0)) == varop)
9584    varop = XEXP (x, 0);
9585  else if (GET_MODE (varop) != shift_mode)
9586    varop = gen_lowpart_for_combine (shift_mode, varop);
9587
9588  /* If we can't make the SUBREG, try to return what we were given.  */
9589  if (GET_CODE (varop) == CLOBBER)
9590    return x ? x : varop;
9591
9592  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9593  if (new != 0)
9594    x = new;
9595  else
9596    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9597
9598  /* If we have an outer operation and we just made a shift, it is
9599     possible that we could have simplified the shift were it not
9600     for the outer operation.  So try to do the simplification
9601     recursively.  */
9602
9603  if (outer_op != NIL && GET_CODE (x) == code
9604      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9605    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9606			      INTVAL (XEXP (x, 1)));
9607
9608  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9609     turn off all the bits that the shift would have turned off.  */
9610  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9611    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9612				GET_MODE_MASK (result_mode) >> orig_count);
9613
9614  /* Do the remainder of the processing in RESULT_MODE.  */
9615  x = gen_lowpart_for_combine (result_mode, x);
9616
9617  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9618     operation.  */
9619  if (complement_p)
9620    x =simplify_gen_unary (NOT, result_mode, x, result_mode);
9621
9622  if (outer_op != NIL)
9623    {
9624      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9625	outer_const = trunc_int_for_mode (outer_const, result_mode);
9626
9627      if (outer_op == AND)
9628	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9629      else if (outer_op == SET)
9630	/* This means that we have determined that the result is
9631	   equivalent to a constant.  This should be rare.  */
9632	x = GEN_INT (outer_const);
9633      else if (GET_RTX_CLASS (outer_op) == '1')
9634	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9635      else
9636	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9637    }
9638
9639  return x;
9640}
9641
9642/* Like recog, but we receive the address of a pointer to a new pattern.
9643   We try to match the rtx that the pointer points to.
9644   If that fails, we may try to modify or replace the pattern,
9645   storing the replacement into the same pointer object.
9646
9647   Modifications include deletion or addition of CLOBBERs.
9648
9649   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9650   the CLOBBERs are placed.
9651
9652   The value is the final insn code from the pattern ultimately matched,
9653   or -1.  */
9654
9655static int
9656recog_for_combine (pnewpat, insn, pnotes)
9657     rtx *pnewpat;
9658     rtx insn;
9659     rtx *pnotes;
9660{
9661  rtx pat = *pnewpat;
9662  int insn_code_number;
9663  int num_clobbers_to_add = 0;
9664  int i;
9665  rtx notes = 0;
9666  rtx dummy_insn;
9667
9668  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9669     we use to indicate that something didn't match.  If we find such a
9670     thing, force rejection.  */
9671  if (GET_CODE (pat) == PARALLEL)
9672    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9673      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9674	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9675	return -1;
9676
9677  /* *pnewpat does not have to be actual PATTERN (insn), so make a dummy
9678     instruction for pattern recognition.  */
9679  dummy_insn = shallow_copy_rtx (insn);
9680  PATTERN (dummy_insn) = pat;
9681  REG_NOTES (dummy_insn) = 0;
9682
9683  insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9684
9685  /* If it isn't, there is the possibility that we previously had an insn
9686     that clobbered some register as a side effect, but the combined
9687     insn doesn't need to do that.  So try once more without the clobbers
9688     unless this represents an ASM insn.  */
9689
9690  if (insn_code_number < 0 && ! check_asm_operands (pat)
9691      && GET_CODE (pat) == PARALLEL)
9692    {
9693      int pos;
9694
9695      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9696	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9697	  {
9698	    if (i != pos)
9699	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9700	    pos++;
9701	  }
9702
9703      SUBST_INT (XVECLEN (pat, 0), pos);
9704
9705      if (pos == 1)
9706	pat = XVECEXP (pat, 0, 0);
9707
9708      PATTERN (dummy_insn) = pat;
9709      insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9710    }
9711
9712  /* Recognize all noop sets, these will be killed by followup pass.  */
9713  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9714    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9715
9716  /* If we had any clobbers to add, make a new pattern than contains
9717     them.  Then check to make sure that all of them are dead.  */
9718  if (num_clobbers_to_add)
9719    {
9720      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9721				     rtvec_alloc (GET_CODE (pat) == PARALLEL
9722						  ? (XVECLEN (pat, 0)
9723						     + num_clobbers_to_add)
9724						  : num_clobbers_to_add + 1));
9725
9726      if (GET_CODE (pat) == PARALLEL)
9727	for (i = 0; i < XVECLEN (pat, 0); i++)
9728	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9729      else
9730	XVECEXP (newpat, 0, 0) = pat;
9731
9732      add_clobbers (newpat, insn_code_number);
9733
9734      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9735	   i < XVECLEN (newpat, 0); i++)
9736	{
9737	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9738	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9739	    return -1;
9740	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9741				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9742	}
9743      pat = newpat;
9744    }
9745
9746  *pnewpat = pat;
9747  *pnotes = notes;
9748
9749  return insn_code_number;
9750}
9751
9752/* Like gen_lowpart but for use by combine.  In combine it is not possible
9753   to create any new pseudoregs.  However, it is safe to create
9754   invalid memory addresses, because combine will try to recognize
9755   them and all they will do is make the combine attempt fail.
9756
9757   If for some reason this cannot do its job, an rtx
9758   (clobber (const_int 0)) is returned.
9759   An insn containing that will not be recognized.  */
9760
9761#undef gen_lowpart
9762
9763static rtx
9764gen_lowpart_for_combine (mode, x)
9765     enum machine_mode mode;
9766     rtx x;
9767{
9768  rtx result;
9769
9770  if (GET_MODE (x) == mode)
9771    return x;
9772
9773  /* We can only support MODE being wider than a word if X is a
9774     constant integer or has a mode the same size.  */
9775
9776  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9777      && ! ((GET_MODE (x) == VOIDmode
9778	     && (GET_CODE (x) == CONST_INT
9779		 || GET_CODE (x) == CONST_DOUBLE))
9780	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9781    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9782
9783  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
9784     won't know what to do.  So we will strip off the SUBREG here and
9785     process normally.  */
9786  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9787    {
9788      x = SUBREG_REG (x);
9789      if (GET_MODE (x) == mode)
9790	return x;
9791    }
9792
9793  result = gen_lowpart_common (mode, x);
9794#ifdef CLASS_CANNOT_CHANGE_MODE
9795  if (result != 0
9796      && GET_CODE (result) == SUBREG
9797      && GET_CODE (SUBREG_REG (result)) == REG
9798      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9799      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9800				     GET_MODE (SUBREG_REG (result))))
9801    REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9802#endif
9803
9804  if (result)
9805    return result;
9806
9807  if (GET_CODE (x) == MEM)
9808    {
9809      int offset = 0;
9810
9811      /* Refuse to work on a volatile memory ref or one with a mode-dependent
9812	 address.  */
9813      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9814	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9815
9816      /* If we want to refer to something bigger than the original memref,
9817	 generate a perverse subreg instead.  That will force a reload
9818	 of the original memref X.  */
9819      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9820	return gen_rtx_SUBREG (mode, x, 0);
9821
9822      if (WORDS_BIG_ENDIAN)
9823	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9824		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9825
9826      if (BYTES_BIG_ENDIAN)
9827	{
9828	  /* Adjust the address so that the address-after-the-data is
9829	     unchanged.  */
9830	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9831		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9832	}
9833
9834      return adjust_address_nv (x, mode, offset);
9835    }
9836
9837  /* If X is a comparison operator, rewrite it in a new mode.  This
9838     probably won't match, but may allow further simplifications.  */
9839  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9840    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9841
9842  /* If we couldn't simplify X any other way, just enclose it in a
9843     SUBREG.  Normally, this SUBREG won't match, but some patterns may
9844     include an explicit SUBREG or we may simplify it further in combine.  */
9845  else
9846    {
9847      int offset = 0;
9848      rtx res;
9849
9850      /* We can't handle VOIDmodes.  We can get here when generating vector
9851	 modes since these, unlike integral and floating point modes are not
9852	 handled earlier.  */
9853      if (GET_MODE (x) == VOIDmode)
9854	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9855
9856      offset = subreg_lowpart_offset (mode, GET_MODE (x));
9857      res = simplify_gen_subreg (mode, x, GET_MODE (x), offset);
9858      if (res)
9859	return res;
9860      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9861    }
9862}
9863
9864/* These routines make binary and unary operations by first seeing if they
9865   fold; if not, a new expression is allocated.  */
9866
9867static rtx
9868gen_binary (code, mode, op0, op1)
9869     enum rtx_code code;
9870     enum machine_mode mode;
9871     rtx op0, op1;
9872{
9873  rtx result;
9874  rtx tem;
9875
9876  if (GET_RTX_CLASS (code) == 'c'
9877      && swap_commutative_operands_p (op0, op1))
9878    tem = op0, op0 = op1, op1 = tem;
9879
9880  if (GET_RTX_CLASS (code) == '<')
9881    {
9882      enum machine_mode op_mode = GET_MODE (op0);
9883
9884      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9885	 just (REL_OP X Y).  */
9886      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9887	{
9888	  op1 = XEXP (op0, 1);
9889	  op0 = XEXP (op0, 0);
9890	  op_mode = GET_MODE (op0);
9891	}
9892
9893      if (op_mode == VOIDmode)
9894	op_mode = GET_MODE (op1);
9895      result = simplify_relational_operation (code, op_mode, op0, op1);
9896    }
9897  else
9898    result = simplify_binary_operation (code, mode, op0, op1);
9899
9900  if (result)
9901    return result;
9902
9903  /* Put complex operands first and constants second.  */
9904  if (GET_RTX_CLASS (code) == 'c'
9905      && swap_commutative_operands_p (op0, op1))
9906    return gen_rtx_fmt_ee (code, mode, op1, op0);
9907
9908  /* If we are turning off bits already known off in OP0, we need not do
9909     an AND.  */
9910  else if (code == AND && GET_CODE (op1) == CONST_INT
9911	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9912	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
9913    return op0;
9914
9915  return gen_rtx_fmt_ee (code, mode, op0, op1);
9916}
9917
9918/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9919   comparison code that will be tested.
9920
9921   The result is a possibly different comparison code to use.  *POP0 and
9922   *POP1 may be updated.
9923
9924   It is possible that we might detect that a comparison is either always
9925   true or always false.  However, we do not perform general constant
9926   folding in combine, so this knowledge isn't useful.  Such tautologies
9927   should have been detected earlier.  Hence we ignore all such cases.  */
9928
9929static enum rtx_code
9930simplify_comparison (code, pop0, pop1)
9931     enum rtx_code code;
9932     rtx *pop0;
9933     rtx *pop1;
9934{
9935  rtx op0 = *pop0;
9936  rtx op1 = *pop1;
9937  rtx tem, tem1;
9938  int i;
9939  enum machine_mode mode, tmode;
9940
9941  /* Try a few ways of applying the same transformation to both operands.  */
9942  while (1)
9943    {
9944#ifndef WORD_REGISTER_OPERATIONS
9945      /* The test below this one won't handle SIGN_EXTENDs on these machines,
9946	 so check specially.  */
9947      if (code != GTU && code != GEU && code != LTU && code != LEU
9948	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9949	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
9950	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
9951	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9952	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9953	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9954	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9955	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9956	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
9957	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9958	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9959	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9960	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9961	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9962	  && (INTVAL (XEXP (op0, 1))
9963	      == (GET_MODE_BITSIZE (GET_MODE (op0))
9964		  - (GET_MODE_BITSIZE
9965		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9966	{
9967	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9968	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9969	}
9970#endif
9971
9972      /* If both operands are the same constant shift, see if we can ignore the
9973	 shift.  We can if the shift is a rotate or if the bits shifted out of
9974	 this shift are known to be zero for both inputs and if the type of
9975	 comparison is compatible with the shift.  */
9976      if (GET_CODE (op0) == GET_CODE (op1)
9977	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9978	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9979	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9980		  && (code != GT && code != LT && code != GE && code != LE))
9981	      || (GET_CODE (op0) == ASHIFTRT
9982		  && (code != GTU && code != LTU
9983		      && code != GEU && code != LEU)))
9984	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9985	  && INTVAL (XEXP (op0, 1)) >= 0
9986	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9987	  && XEXP (op0, 1) == XEXP (op1, 1))
9988	{
9989	  enum machine_mode mode = GET_MODE (op0);
9990	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9991	  int shift_count = INTVAL (XEXP (op0, 1));
9992
9993	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9994	    mask &= (mask >> shift_count) << shift_count;
9995	  else if (GET_CODE (op0) == ASHIFT)
9996	    mask = (mask & (mask << shift_count)) >> shift_count;
9997
9998	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
9999	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10000	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10001	  else
10002	    break;
10003	}
10004
10005      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10006	 SUBREGs are of the same mode, and, in both cases, the AND would
10007	 be redundant if the comparison was done in the narrower mode,
10008	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10009	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10010	 if we only care about QImode, we don't need the AND).  This case
10011	 occurs if the output mode of an scc insn is not SImode and
10012	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10013
10014	 Similarly, check for a case where the AND's are ZERO_EXTEND
10015	 operations from some narrower mode even though a SUBREG is not
10016	 present.  */
10017
10018      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10019	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10020	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10021	{
10022	  rtx inner_op0 = XEXP (op0, 0);
10023	  rtx inner_op1 = XEXP (op1, 0);
10024	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10025	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10026	  int changed = 0;
10027
10028	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10029	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10030		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10031	      && (GET_MODE (SUBREG_REG (inner_op0))
10032		  == GET_MODE (SUBREG_REG (inner_op1)))
10033	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10034		  <= HOST_BITS_PER_WIDE_INT)
10035	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10036					     GET_MODE (SUBREG_REG (inner_op0)))))
10037	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10038					     GET_MODE (SUBREG_REG (inner_op1))))))
10039	    {
10040	      op0 = SUBREG_REG (inner_op0);
10041	      op1 = SUBREG_REG (inner_op1);
10042
10043	      /* The resulting comparison is always unsigned since we masked
10044		 off the original sign bit.  */
10045	      code = unsigned_condition (code);
10046
10047	      changed = 1;
10048	    }
10049
10050	  else if (c0 == c1)
10051	    for (tmode = GET_CLASS_NARROWEST_MODE
10052		 (GET_MODE_CLASS (GET_MODE (op0)));
10053		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10054	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10055		{
10056		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10057		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10058		  code = unsigned_condition (code);
10059		  changed = 1;
10060		  break;
10061		}
10062
10063	  if (! changed)
10064	    break;
10065	}
10066
10067      /* If both operands are NOT, we can strip off the outer operation
10068	 and adjust the comparison code for swapped operands; similarly for
10069	 NEG, except that this must be an equality comparison.  */
10070      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10071	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10072		   && (code == EQ || code == NE)))
10073	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10074
10075      else
10076	break;
10077    }
10078
10079  /* If the first operand is a constant, swap the operands and adjust the
10080     comparison code appropriately, but don't do this if the second operand
10081     is already a constant integer.  */
10082  if (swap_commutative_operands_p (op0, op1))
10083    {
10084      tem = op0, op0 = op1, op1 = tem;
10085      code = swap_condition (code);
10086    }
10087
10088  /* We now enter a loop during which we will try to simplify the comparison.
10089     For the most part, we only are concerned with comparisons with zero,
10090     but some things may really be comparisons with zero but not start
10091     out looking that way.  */
10092
10093  while (GET_CODE (op1) == CONST_INT)
10094    {
10095      enum machine_mode mode = GET_MODE (op0);
10096      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10097      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10098      int equality_comparison_p;
10099      int sign_bit_comparison_p;
10100      int unsigned_comparison_p;
10101      HOST_WIDE_INT const_op;
10102
10103      /* We only want to handle integral modes.  This catches VOIDmode,
10104	 CCmode, and the floating-point modes.  An exception is that we
10105	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10106	 operation.  */
10107
10108      if (GET_MODE_CLASS (mode) != MODE_INT
10109	  && ! (mode == VOIDmode
10110		&& (GET_CODE (op0) == COMPARE
10111		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10112	break;
10113
10114      /* Get the constant we are comparing against and turn off all bits
10115	 not on in our mode.  */
10116      const_op = trunc_int_for_mode (INTVAL (op1), mode);
10117      op1 = GEN_INT (const_op);
10118
10119      /* If we are comparing against a constant power of two and the value
10120	 being compared can only have that single bit nonzero (e.g., it was
10121	 `and'ed with that bit), we can replace this with a comparison
10122	 with zero.  */
10123      if (const_op
10124	  && (code == EQ || code == NE || code == GE || code == GEU
10125	      || code == LT || code == LTU)
10126	  && mode_width <= HOST_BITS_PER_WIDE_INT
10127	  && exact_log2 (const_op) >= 0
10128	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10129	{
10130	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10131	  op1 = const0_rtx, const_op = 0;
10132	}
10133
10134      /* Similarly, if we are comparing a value known to be either -1 or
10135	 0 with -1, change it to the opposite comparison against zero.  */
10136
10137      if (const_op == -1
10138	  && (code == EQ || code == NE || code == GT || code == LE
10139	      || code == GEU || code == LTU)
10140	  && num_sign_bit_copies (op0, mode) == mode_width)
10141	{
10142	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10143	  op1 = const0_rtx, const_op = 0;
10144	}
10145
10146      /* Do some canonicalizations based on the comparison code.  We prefer
10147	 comparisons against zero and then prefer equality comparisons.
10148	 If we can reduce the size of a constant, we will do that too.  */
10149
10150      switch (code)
10151	{
10152	case LT:
10153	  /* < C is equivalent to <= (C - 1) */
10154	  if (const_op > 0)
10155	    {
10156	      const_op -= 1;
10157	      op1 = GEN_INT (const_op);
10158	      code = LE;
10159	      /* ... fall through to LE case below.  */
10160	    }
10161	  else
10162	    break;
10163
10164	case LE:
10165	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
10166	  if (const_op < 0)
10167	    {
10168	      const_op += 1;
10169	      op1 = GEN_INT (const_op);
10170	      code = LT;
10171	    }
10172
10173	  /* If we are doing a <= 0 comparison on a value known to have
10174	     a zero sign bit, we can replace this with == 0.  */
10175	  else if (const_op == 0
10176		   && mode_width <= HOST_BITS_PER_WIDE_INT
10177		   && (nonzero_bits (op0, mode)
10178		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10179	    code = EQ;
10180	  break;
10181
10182	case GE:
10183	  /* >= C is equivalent to > (C - 1).  */
10184	  if (const_op > 0)
10185	    {
10186	      const_op -= 1;
10187	      op1 = GEN_INT (const_op);
10188	      code = GT;
10189	      /* ... fall through to GT below.  */
10190	    }
10191	  else
10192	    break;
10193
10194	case GT:
10195	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10196	  if (const_op < 0)
10197	    {
10198	      const_op += 1;
10199	      op1 = GEN_INT (const_op);
10200	      code = GE;
10201	    }
10202
10203	  /* If we are doing a > 0 comparison on a value known to have
10204	     a zero sign bit, we can replace this with != 0.  */
10205	  else if (const_op == 0
10206		   && mode_width <= HOST_BITS_PER_WIDE_INT
10207		   && (nonzero_bits (op0, mode)
10208		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10209	    code = NE;
10210	  break;
10211
10212	case LTU:
10213	  /* < C is equivalent to <= (C - 1).  */
10214	  if (const_op > 0)
10215	    {
10216	      const_op -= 1;
10217	      op1 = GEN_INT (const_op);
10218	      code = LEU;
10219	      /* ... fall through ...  */
10220	    }
10221
10222	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10223	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10224		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10225	    {
10226	      const_op = 0, op1 = const0_rtx;
10227	      code = GE;
10228	      break;
10229	    }
10230	  else
10231	    break;
10232
10233	case LEU:
10234	  /* unsigned <= 0 is equivalent to == 0 */
10235	  if (const_op == 0)
10236	    code = EQ;
10237
10238	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10239	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10240		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10241	    {
10242	      const_op = 0, op1 = const0_rtx;
10243	      code = GE;
10244	    }
10245	  break;
10246
10247	case GEU:
10248	  /* >= C is equivalent to < (C - 1).  */
10249	  if (const_op > 1)
10250	    {
10251	      const_op -= 1;
10252	      op1 = GEN_INT (const_op);
10253	      code = GTU;
10254	      /* ... fall through ...  */
10255	    }
10256
10257	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10258	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10259		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10260	    {
10261	      const_op = 0, op1 = const0_rtx;
10262	      code = LT;
10263	      break;
10264	    }
10265	  else
10266	    break;
10267
10268	case GTU:
10269	  /* unsigned > 0 is equivalent to != 0 */
10270	  if (const_op == 0)
10271	    code = NE;
10272
10273	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10274	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10275		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10276	    {
10277	      const_op = 0, op1 = const0_rtx;
10278	      code = LT;
10279	    }
10280	  break;
10281
10282	default:
10283	  break;
10284	}
10285
10286      /* Compute some predicates to simplify code below.  */
10287
10288      equality_comparison_p = (code == EQ || code == NE);
10289      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10290      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10291			       || code == GEU);
10292
10293      /* If this is a sign bit comparison and we can do arithmetic in
10294	 MODE, say that we will only be needing the sign bit of OP0.  */
10295      if (sign_bit_comparison_p
10296	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10297	op0 = force_to_mode (op0, mode,
10298			     ((HOST_WIDE_INT) 1
10299			      << (GET_MODE_BITSIZE (mode) - 1)),
10300			     NULL_RTX, 0);
10301
10302      /* Now try cases based on the opcode of OP0.  If none of the cases
10303	 does a "continue", we exit this loop immediately after the
10304	 switch.  */
10305
10306      switch (GET_CODE (op0))
10307	{
10308	case ZERO_EXTRACT:
10309	  /* If we are extracting a single bit from a variable position in
10310	     a constant that has only a single bit set and are comparing it
10311	     with zero, we can convert this into an equality comparison
10312	     between the position and the location of the single bit.  */
10313
10314	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10315	      && XEXP (op0, 1) == const1_rtx
10316	      && equality_comparison_p && const_op == 0
10317	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10318	    {
10319	      if (BITS_BIG_ENDIAN)
10320		{
10321		  enum machine_mode new_mode
10322		    = mode_for_extraction (EP_extzv, 1);
10323		  if (new_mode == MAX_MACHINE_MODE)
10324		    i = BITS_PER_WORD - 1 - i;
10325		  else
10326		    {
10327		      mode = new_mode;
10328		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10329		    }
10330		}
10331
10332	      op0 = XEXP (op0, 2);
10333	      op1 = GEN_INT (i);
10334	      const_op = i;
10335
10336	      /* Result is nonzero iff shift count is equal to I.  */
10337	      code = reverse_condition (code);
10338	      continue;
10339	    }
10340
10341	  /* ... fall through ...  */
10342
10343	case SIGN_EXTRACT:
10344	  tem = expand_compound_operation (op0);
10345	  if (tem != op0)
10346	    {
10347	      op0 = tem;
10348	      continue;
10349	    }
10350	  break;
10351
10352	case NOT:
10353	  /* If testing for equality, we can take the NOT of the constant.  */
10354	  if (equality_comparison_p
10355	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10356	    {
10357	      op0 = XEXP (op0, 0);
10358	      op1 = tem;
10359	      continue;
10360	    }
10361
10362	  /* If just looking at the sign bit, reverse the sense of the
10363	     comparison.  */
10364	  if (sign_bit_comparison_p)
10365	    {
10366	      op0 = XEXP (op0, 0);
10367	      code = (code == GE ? LT : GE);
10368	      continue;
10369	    }
10370	  break;
10371
10372	case NEG:
10373	  /* If testing for equality, we can take the NEG of the constant.  */
10374	  if (equality_comparison_p
10375	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10376	    {
10377	      op0 = XEXP (op0, 0);
10378	      op1 = tem;
10379	      continue;
10380	    }
10381
10382	  /* The remaining cases only apply to comparisons with zero.  */
10383	  if (const_op != 0)
10384	    break;
10385
10386	  /* When X is ABS or is known positive,
10387	     (neg X) is < 0 if and only if X != 0.  */
10388
10389	  if (sign_bit_comparison_p
10390	      && (GET_CODE (XEXP (op0, 0)) == ABS
10391		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10392		      && (nonzero_bits (XEXP (op0, 0), mode)
10393			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10394	    {
10395	      op0 = XEXP (op0, 0);
10396	      code = (code == LT ? NE : EQ);
10397	      continue;
10398	    }
10399
10400	  /* If we have NEG of something whose two high-order bits are the
10401	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10402	  if (num_sign_bit_copies (op0, mode) >= 2)
10403	    {
10404	      op0 = XEXP (op0, 0);
10405	      code = swap_condition (code);
10406	      continue;
10407	    }
10408	  break;
10409
10410	case ROTATE:
10411	  /* If we are testing equality and our count is a constant, we
10412	     can perform the inverse operation on our RHS.  */
10413	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10414	      && (tem = simplify_binary_operation (ROTATERT, mode,
10415						   op1, XEXP (op0, 1))) != 0)
10416	    {
10417	      op0 = XEXP (op0, 0);
10418	      op1 = tem;
10419	      continue;
10420	    }
10421
10422	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10423	     a particular bit.  Convert it to an AND of a constant of that
10424	     bit.  This will be converted into a ZERO_EXTRACT.  */
10425	  if (const_op == 0 && sign_bit_comparison_p
10426	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10427	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10428	    {
10429	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10430					    ((HOST_WIDE_INT) 1
10431					     << (mode_width - 1
10432						 - INTVAL (XEXP (op0, 1)))));
10433	      code = (code == LT ? NE : EQ);
10434	      continue;
10435	    }
10436
10437	  /* Fall through.  */
10438
10439	case ABS:
10440	  /* ABS is ignorable inside an equality comparison with zero.  */
10441	  if (const_op == 0 && equality_comparison_p)
10442	    {
10443	      op0 = XEXP (op0, 0);
10444	      continue;
10445	    }
10446	  break;
10447
10448	case SIGN_EXTEND:
10449	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10450	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10451	     are either testing inequality or have an unsigned comparison
10452	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
10453	  if (! unsigned_comparison_p
10454	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10455		  <= HOST_BITS_PER_WIDE_INT)
10456	      && ((unsigned HOST_WIDE_INT) const_op
10457		  < (((unsigned HOST_WIDE_INT) 1
10458		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10459	    {
10460	      op0 = XEXP (op0, 0);
10461	      continue;
10462	    }
10463	  break;
10464
10465	case SUBREG:
10466	  /* Check for the case where we are comparing A - C1 with C2,
10467	     both constants are smaller than 1/2 the maximum positive
10468	     value in MODE, and the comparison is equality or unsigned.
10469	     In that case, if A is either zero-extended to MODE or has
10470	     sufficient sign bits so that the high-order bit in MODE
10471	     is a copy of the sign in the inner mode, we can prove that it is
10472	     safe to do the operation in the wider mode.  This simplifies
10473	     many range checks.  */
10474
10475	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10476	      && subreg_lowpart_p (op0)
10477	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10478	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10479	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10480	      && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10481		  < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10482	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10483	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10484				      GET_MODE (SUBREG_REG (op0)))
10485			& ~GET_MODE_MASK (mode))
10486		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10487					   GET_MODE (SUBREG_REG (op0)))
10488		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10489			 - GET_MODE_BITSIZE (mode)))))
10490	    {
10491	      op0 = SUBREG_REG (op0);
10492	      continue;
10493	    }
10494
10495	  /* If the inner mode is narrower and we are extracting the low part,
10496	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10497	  if (subreg_lowpart_p (op0)
10498	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10499	    /* Fall through */ ;
10500	  else
10501	    break;
10502
10503	  /* ... fall through ...  */
10504
10505	case ZERO_EXTEND:
10506	  if ((unsigned_comparison_p || equality_comparison_p)
10507	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10508		  <= HOST_BITS_PER_WIDE_INT)
10509	      && ((unsigned HOST_WIDE_INT) const_op
10510		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10511	    {
10512	      op0 = XEXP (op0, 0);
10513	      continue;
10514	    }
10515	  break;
10516
10517	case PLUS:
10518	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10519	     this for equality comparisons due to pathological cases involving
10520	     overflows.  */
10521	  if (equality_comparison_p
10522	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10523							op1, XEXP (op0, 1))))
10524	    {
10525	      op0 = XEXP (op0, 0);
10526	      op1 = tem;
10527	      continue;
10528	    }
10529
10530	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10531	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10532	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10533	    {
10534	      op0 = XEXP (XEXP (op0, 0), 0);
10535	      code = (code == LT ? EQ : NE);
10536	      continue;
10537	    }
10538	  break;
10539
10540	case MINUS:
10541	  /* We used to optimize signed comparisons against zero, but that
10542	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10543	     arrive here as equality comparisons, or (GEU, LTU) are
10544	     optimized away.  No need to special-case them.  */
10545
10546	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10547	     (eq B (minus A C)), whichever simplifies.  We can only do
10548	     this for equality comparisons due to pathological cases involving
10549	     overflows.  */
10550	  if (equality_comparison_p
10551	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10552							XEXP (op0, 1), op1)))
10553	    {
10554	      op0 = XEXP (op0, 0);
10555	      op1 = tem;
10556	      continue;
10557	    }
10558
10559	  if (equality_comparison_p
10560	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10561							XEXP (op0, 0), op1)))
10562	    {
10563	      op0 = XEXP (op0, 1);
10564	      op1 = tem;
10565	      continue;
10566	    }
10567
10568	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10569	     of bits in X minus 1, is one iff X > 0.  */
10570	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10571	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10572	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10573	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10574	    {
10575	      op0 = XEXP (op0, 1);
10576	      code = (code == GE ? LE : GT);
10577	      continue;
10578	    }
10579	  break;
10580
10581	case XOR:
10582	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10583	     if C is zero or B is a constant.  */
10584	  if (equality_comparison_p
10585	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10586							XEXP (op0, 1), op1)))
10587	    {
10588	      op0 = XEXP (op0, 0);
10589	      op1 = tem;
10590	      continue;
10591	    }
10592	  break;
10593
10594	case EQ:  case NE:
10595	case UNEQ:  case LTGT:
10596	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10597	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10598        case UNORDERED: case ORDERED:
10599	  /* We can't do anything if OP0 is a condition code value, rather
10600	     than an actual data value.  */
10601	  if (const_op != 0
10602#ifdef HAVE_cc0
10603	      || XEXP (op0, 0) == cc0_rtx
10604#endif
10605	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10606	    break;
10607
10608	  /* Get the two operands being compared.  */
10609	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10610	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10611	  else
10612	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10613
10614	  /* Check for the cases where we simply want the result of the
10615	     earlier test or the opposite of that result.  */
10616	  if (code == NE || code == EQ
10617	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10618		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10619		  && (STORE_FLAG_VALUE
10620		      & (((HOST_WIDE_INT) 1
10621			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10622		  && (code == LT || code == GE)))
10623	    {
10624	      enum rtx_code new_code;
10625	      if (code == LT || code == NE)
10626		new_code = GET_CODE (op0);
10627	      else
10628		new_code = combine_reversed_comparison_code (op0);
10629
10630	      if (new_code != UNKNOWN)
10631		{
10632		  code = new_code;
10633		  op0 = tem;
10634		  op1 = tem1;
10635		  continue;
10636		}
10637	    }
10638	  break;
10639
10640	case IOR:
10641	  /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10642	     iff X <= 0.  */
10643	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10644	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10645	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10646	    {
10647	      op0 = XEXP (op0, 1);
10648	      code = (code == GE ? GT : LE);
10649	      continue;
10650	    }
10651	  break;
10652
10653	case AND:
10654	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10655	     will be converted to a ZERO_EXTRACT later.  */
10656	  if (const_op == 0 && equality_comparison_p
10657	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10658	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10659	    {
10660	      op0 = simplify_and_const_int
10661		(op0, mode, gen_rtx_LSHIFTRT (mode,
10662					      XEXP (op0, 1),
10663					      XEXP (XEXP (op0, 0), 1)),
10664		 (HOST_WIDE_INT) 1);
10665	      continue;
10666	    }
10667
10668	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10669	     zero and X is a comparison and C1 and C2 describe only bits set
10670	     in STORE_FLAG_VALUE, we can compare with X.  */
10671	  if (const_op == 0 && equality_comparison_p
10672	      && mode_width <= HOST_BITS_PER_WIDE_INT
10673	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10674	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10675	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10676	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10677	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10678	    {
10679	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10680		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10681	      if ((~STORE_FLAG_VALUE & mask) == 0
10682		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10683		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10684			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10685		{
10686		  op0 = XEXP (XEXP (op0, 0), 0);
10687		  continue;
10688		}
10689	    }
10690
10691	  /* If we are doing an equality comparison of an AND of a bit equal
10692	     to the sign bit, replace this with a LT or GE comparison of
10693	     the underlying value.  */
10694	  if (equality_comparison_p
10695	      && const_op == 0
10696	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10697	      && mode_width <= HOST_BITS_PER_WIDE_INT
10698	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10699		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10700	    {
10701	      op0 = XEXP (op0, 0);
10702	      code = (code == EQ ? GE : LT);
10703	      continue;
10704	    }
10705
10706	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10707	     mode, the constant fits within that mode, and this is either an
10708	     equality or unsigned comparison, try to do this comparison in
10709	     the narrower mode.  */
10710	  if ((equality_comparison_p || unsigned_comparison_p)
10711	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10712	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10713				   & GET_MODE_MASK (mode))
10714				  + 1)) >= 0
10715	      && const_op >> i == 0
10716	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10717	    {
10718	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10719	      continue;
10720	    }
10721
10722	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10723	     in both M1 and M2 and the SUBREG is either paradoxical or
10724	     represents the low part, permute the SUBREG and the AND and
10725	     try again.  */
10726	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
10727	      && (0
10728#ifdef WORD_REGISTER_OPERATIONS
10729		  || ((mode_width
10730		       > (GET_MODE_BITSIZE
10731			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10732		      && mode_width <= BITS_PER_WORD)
10733#endif
10734		  || ((mode_width
10735		       <= (GET_MODE_BITSIZE
10736			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10737		      && subreg_lowpart_p (XEXP (op0, 0))))
10738#ifndef WORD_REGISTER_OPERATIONS
10739	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10740		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10741		 As originally written the upper bits have a defined value
10742		 due to the AND operation.  However, if we commute the AND
10743		 inside the SUBREG then they no longer have defined values
10744		 and the meaning of the code has been changed.  */
10745	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10746		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10747#endif
10748	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10749	      && mode_width <= HOST_BITS_PER_WIDE_INT
10750	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10751		  <= HOST_BITS_PER_WIDE_INT)
10752	      && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10753	      && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10754		       & INTVAL (XEXP (op0, 1)))
10755	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10756	      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10757		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10758
10759	    {
10760	      op0
10761		= gen_lowpart_for_combine
10762		  (mode,
10763		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10764			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10765	      continue;
10766	    }
10767
10768	  /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10769	     (eq (and (lshiftrt X) 1) 0).  */
10770	  if (const_op == 0 && equality_comparison_p
10771	      && XEXP (op0, 1) == const1_rtx
10772	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10773	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10774	    {
10775	      op0 = simplify_and_const_int
10776		(op0, mode,
10777		 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10778				   XEXP (XEXP (op0, 0), 1)),
10779		 (HOST_WIDE_INT) 1);
10780	      code = (code == NE ? EQ : NE);
10781	      continue;
10782	    }
10783	  break;
10784
10785	case ASHIFT:
10786	  /* If we have (compare (ashift FOO N) (const_int C)) and
10787	     the high order N bits of FOO (N+1 if an inequality comparison)
10788	     are known to be zero, we can do this by comparing FOO with C
10789	     shifted right N bits so long as the low-order N bits of C are
10790	     zero.  */
10791	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10792	      && INTVAL (XEXP (op0, 1)) >= 0
10793	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10794		  < HOST_BITS_PER_WIDE_INT)
10795	      && ((const_op
10796		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10797	      && mode_width <= HOST_BITS_PER_WIDE_INT
10798	      && (nonzero_bits (XEXP (op0, 0), mode)
10799		  & ~(mask >> (INTVAL (XEXP (op0, 1))
10800			       + ! equality_comparison_p))) == 0)
10801	    {
10802	      /* We must perform a logical shift, not an arithmetic one,
10803		 as we want the top N bits of C to be zero.  */
10804	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10805
10806	      temp >>= INTVAL (XEXP (op0, 1));
10807	      op1 = GEN_INT (trunc_int_for_mode (temp, mode));
10808	      op0 = XEXP (op0, 0);
10809	      continue;
10810	    }
10811
10812	  /* If we are doing a sign bit comparison, it means we are testing
10813	     a particular bit.  Convert it to the appropriate AND.  */
10814	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10815	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10816	    {
10817	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10818					    ((HOST_WIDE_INT) 1
10819					     << (mode_width - 1
10820						 - INTVAL (XEXP (op0, 1)))));
10821	      code = (code == LT ? NE : EQ);
10822	      continue;
10823	    }
10824
10825	  /* If this an equality comparison with zero and we are shifting
10826	     the low bit to the sign bit, we can convert this to an AND of the
10827	     low-order bit.  */
10828	  if (const_op == 0 && equality_comparison_p
10829	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10830	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10831	    {
10832	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10833					    (HOST_WIDE_INT) 1);
10834	      continue;
10835	    }
10836	  break;
10837
10838	case ASHIFTRT:
10839	  /* If this is an equality comparison with zero, we can do this
10840	     as a logical shift, which might be much simpler.  */
10841	  if (equality_comparison_p && const_op == 0
10842	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10843	    {
10844	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10845					  XEXP (op0, 0),
10846					  INTVAL (XEXP (op0, 1)));
10847	      continue;
10848	    }
10849
10850	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10851	     do the comparison in a narrower mode.  */
10852	  if (! unsigned_comparison_p
10853	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10854	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10855	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10856	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10857					 MODE_INT, 1)) != BLKmode
10858	      && (((unsigned HOST_WIDE_INT) const_op
10859		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10860		  <= GET_MODE_MASK (tmode)))
10861	    {
10862	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10863	      continue;
10864	    }
10865
10866	  /* Likewise if OP0 is a PLUS of a sign extension with a
10867	     constant, which is usually represented with the PLUS
10868	     between the shifts.  */
10869	  if (! unsigned_comparison_p
10870	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10871	      && GET_CODE (XEXP (op0, 0)) == PLUS
10872	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10873	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10874	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10875	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10876					 MODE_INT, 1)) != BLKmode
10877	      && (((unsigned HOST_WIDE_INT) const_op
10878		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10879		  <= GET_MODE_MASK (tmode)))
10880	    {
10881	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10882	      rtx add_const = XEXP (XEXP (op0, 0), 1);
10883	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10884					  XEXP (op0, 1));
10885
10886	      op0 = gen_binary (PLUS, tmode,
10887				gen_lowpart_for_combine (tmode, inner),
10888				new_const);
10889	      continue;
10890	    }
10891
10892	  /* ... fall through ...  */
10893	case LSHIFTRT:
10894	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10895	     the low order N bits of FOO are known to be zero, we can do this
10896	     by comparing FOO with C shifted left N bits so long as no
10897	     overflow occurs.  */
10898	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10899	      && INTVAL (XEXP (op0, 1)) >= 0
10900	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10901	      && mode_width <= HOST_BITS_PER_WIDE_INT
10902	      && (nonzero_bits (XEXP (op0, 0), mode)
10903		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10904	      && (((unsigned HOST_WIDE_INT) const_op
10905		   + (GET_CODE (op0) != LSHIFTRT
10906		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10907			 + 1)
10908		      : 0))
10909		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10910	    {
10911	      /* If the shift was logical, then we must make the condition
10912		 unsigned.  */
10913	      if (GET_CODE (op0) == LSHIFTRT)
10914		code = unsigned_condition (code);
10915
10916	      const_op <<= INTVAL (XEXP (op0, 1));
10917	      op1 = GEN_INT (const_op);
10918	      op0 = XEXP (op0, 0);
10919	      continue;
10920	    }
10921
10922	  /* If we are using this shift to extract just the sign bit, we
10923	     can replace this with an LT or GE comparison.  */
10924	  if (const_op == 0
10925	      && (equality_comparison_p || sign_bit_comparison_p)
10926	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10927	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10928	    {
10929	      op0 = XEXP (op0, 0);
10930	      code = (code == NE || code == GT ? LT : GE);
10931	      continue;
10932	    }
10933	  break;
10934
10935	default:
10936	  break;
10937	}
10938
10939      break;
10940    }
10941
10942  /* Now make any compound operations involved in this comparison.  Then,
10943     check for an outmost SUBREG on OP0 that is not doing anything or is
10944     paradoxical.  The latter transformation must only be performed when
10945     it is known that the "extra" bits will be the same in op0 and op1 or
10946     that they don't matter.  There are three cases to consider:
10947
10948     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
10949     care bits and we can assume they have any convenient value.  So
10950     making the transformation is safe.
10951
10952     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10953     In this case the upper bits of op0 are undefined.  We should not make
10954     the simplification in that case as we do not know the contents of
10955     those bits.
10956
10957     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10958     NIL.  In that case we know those bits are zeros or ones.  We must
10959     also be sure that they are the same as the upper bits of op1.
10960
10961     We can never remove a SUBREG for a non-equality comparison because
10962     the sign bit is in a different place in the underlying object.  */
10963
10964  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10965  op1 = make_compound_operation (op1, SET);
10966
10967  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10968      /* Case 3 above, to sometimes allow (subreg (mem x)), isn't
10969	 implemented.  */
10970      && GET_CODE (SUBREG_REG (op0)) == REG
10971      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10972      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10973      && (code == NE || code == EQ))
10974    {
10975      if (GET_MODE_SIZE (GET_MODE (op0))
10976	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10977	{
10978	  op0 = SUBREG_REG (op0);
10979	  op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10980	}
10981      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10982		<= HOST_BITS_PER_WIDE_INT)
10983	       && (nonzero_bits (SUBREG_REG (op0),
10984				 GET_MODE (SUBREG_REG (op0)))
10985		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10986	{
10987	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
10988
10989	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10990	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10991	    op0 = SUBREG_REG (op0), op1 = tem;
10992	}
10993    }
10994
10995  /* We now do the opposite procedure: Some machines don't have compare
10996     insns in all modes.  If OP0's mode is an integer mode smaller than a
10997     word and we can't do a compare in that mode, see if there is a larger
10998     mode for which we can do the compare.  There are a number of cases in
10999     which we can use the wider mode.  */
11000
11001  mode = GET_MODE (op0);
11002  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11003      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11004      && ! have_insn_for (COMPARE, mode))
11005    for (tmode = GET_MODE_WIDER_MODE (mode);
11006	 (tmode != VOIDmode
11007	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11008	 tmode = GET_MODE_WIDER_MODE (tmode))
11009      if (have_insn_for (COMPARE, tmode))
11010	{
11011	  int zero_extended;
11012
11013	  /* If the only nonzero bits in OP0 and OP1 are those in the
11014	     narrower mode and this is an equality or unsigned comparison,
11015	     we can use the wider mode.  Similarly for sign-extended
11016	     values, in which case it is true for all comparisons.  */
11017	  zero_extended = ((code == EQ || code == NE
11018			    || code == GEU || code == GTU
11019			    || code == LEU || code == LTU)
11020			   && (nonzero_bits (op0, tmode)
11021			       & ~GET_MODE_MASK (mode)) == 0
11022			   && ((GET_CODE (op1) == CONST_INT
11023				|| (nonzero_bits (op1, tmode)
11024				    & ~GET_MODE_MASK (mode)) == 0)));
11025
11026	  if (zero_extended
11027	      || ((num_sign_bit_copies (op0, tmode)
11028		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
11029		  && (num_sign_bit_copies (op1, tmode)
11030		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
11031	    {
11032	      /* If OP0 is an AND and we don't have an AND in MODE either,
11033		 make a new AND in the proper mode.  */
11034	      if (GET_CODE (op0) == AND
11035		  && !have_insn_for (AND, mode))
11036		op0 = gen_binary (AND, tmode,
11037				  gen_lowpart_for_combine (tmode,
11038							   XEXP (op0, 0)),
11039				  gen_lowpart_for_combine (tmode,
11040							   XEXP (op0, 1)));
11041
11042	      op0 = gen_lowpart_for_combine (tmode, op0);
11043	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11044		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11045	      op1 = gen_lowpart_for_combine (tmode, op1);
11046	      break;
11047	    }
11048
11049	  /* If this is a test for negative, we can make an explicit
11050	     test of the sign bit.  */
11051
11052	  if (op1 == const0_rtx && (code == LT || code == GE)
11053	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11054	    {
11055	      op0 = gen_binary (AND, tmode,
11056				gen_lowpart_for_combine (tmode, op0),
11057				GEN_INT ((HOST_WIDE_INT) 1
11058					 << (GET_MODE_BITSIZE (mode) - 1)));
11059	      code = (code == LT) ? NE : EQ;
11060	      break;
11061	    }
11062	}
11063
11064#ifdef CANONICALIZE_COMPARISON
11065  /* If this machine only supports a subset of valid comparisons, see if we
11066     can convert an unsupported one into a supported one.  */
11067  CANONICALIZE_COMPARISON (code, op0, op1);
11068#endif
11069
11070  *pop0 = op0;
11071  *pop1 = op1;
11072
11073  return code;
11074}
11075
11076/* Like jump.c' reversed_comparison_code, but use combine infrastructure for
11077   searching backward.  */
11078static enum rtx_code
11079combine_reversed_comparison_code (exp)
11080     rtx exp;
11081{
11082  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11083  rtx x;
11084
11085  if (code1 != UNKNOWN
11086      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11087    return code1;
11088  /* Otherwise try and find where the condition codes were last set and
11089     use that.  */
11090  x = get_last_value (XEXP (exp, 0));
11091  if (!x || GET_CODE (x) != COMPARE)
11092    return UNKNOWN;
11093  return reversed_comparison_code_parts (GET_CODE (exp),
11094					 XEXP (x, 0), XEXP (x, 1), NULL);
11095}
11096/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11097   Return NULL_RTX in case we fail to do the reversal.  */
11098static rtx
11099reversed_comparison (exp, mode, op0, op1)
11100     rtx exp, op0, op1;
11101     enum machine_mode mode;
11102{
11103  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11104  if (reversed_code == UNKNOWN)
11105    return NULL_RTX;
11106  else
11107    return gen_binary (reversed_code, mode, op0, op1);
11108}
11109
11110/* Utility function for following routine.  Called when X is part of a value
11111   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
11112   for each register mentioned.  Similar to mention_regs in cse.c  */
11113
11114static void
11115update_table_tick (x)
11116     rtx x;
11117{
11118  enum rtx_code code = GET_CODE (x);
11119  const char *fmt = GET_RTX_FORMAT (code);
11120  int i;
11121
11122  if (code == REG)
11123    {
11124      unsigned int regno = REGNO (x);
11125      unsigned int endregno
11126	= regno + (regno < FIRST_PSEUDO_REGISTER
11127		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11128      unsigned int r;
11129
11130      for (r = regno; r < endregno; r++)
11131	reg_last_set_table_tick[r] = label_tick;
11132
11133      return;
11134    }
11135
11136  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11137    /* Note that we can't have an "E" in values stored; see
11138       get_last_value_validate.  */
11139    if (fmt[i] == 'e')
11140      update_table_tick (XEXP (x, i));
11141}
11142
11143/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11144   are saying that the register is clobbered and we no longer know its
11145   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11146   with VALUE also zero and is used to invalidate the register.  */
11147
11148static void
11149record_value_for_reg (reg, insn, value)
11150     rtx reg;
11151     rtx insn;
11152     rtx value;
11153{
11154  unsigned int regno = REGNO (reg);
11155  unsigned int endregno
11156    = regno + (regno < FIRST_PSEUDO_REGISTER
11157	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11158  unsigned int i;
11159
11160  /* If VALUE contains REG and we have a previous value for REG, substitute
11161     the previous value.  */
11162  if (value && insn && reg_overlap_mentioned_p (reg, value))
11163    {
11164      rtx tem;
11165
11166      /* Set things up so get_last_value is allowed to see anything set up to
11167	 our insn.  */
11168      subst_low_cuid = INSN_CUID (insn);
11169      tem = get_last_value (reg);
11170
11171      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11172	 it isn't going to be useful and will take a lot of time to process,
11173	 so just use the CLOBBER.  */
11174
11175      if (tem)
11176	{
11177	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11178	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11179	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11180	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11181	    tem = XEXP (tem, 0);
11182
11183	  value = replace_rtx (copy_rtx (value), reg, tem);
11184	}
11185    }
11186
11187  /* For each register modified, show we don't know its value, that
11188     we don't know about its bitwise content, that its value has been
11189     updated, and that we don't know the location of the death of the
11190     register.  */
11191  for (i = regno; i < endregno; i++)
11192    {
11193      if (insn)
11194	reg_last_set[i] = insn;
11195
11196      reg_last_set_value[i] = 0;
11197      reg_last_set_mode[i] = 0;
11198      reg_last_set_nonzero_bits[i] = 0;
11199      reg_last_set_sign_bit_copies[i] = 0;
11200      reg_last_death[i] = 0;
11201    }
11202
11203  /* Mark registers that are being referenced in this value.  */
11204  if (value)
11205    update_table_tick (value);
11206
11207  /* Now update the status of each register being set.
11208     If someone is using this register in this block, set this register
11209     to invalid since we will get confused between the two lives in this
11210     basic block.  This makes using this register always invalid.  In cse, we
11211     scan the table to invalidate all entries using this register, but this
11212     is too much work for us.  */
11213
11214  for (i = regno; i < endregno; i++)
11215    {
11216      reg_last_set_label[i] = label_tick;
11217      if (value && reg_last_set_table_tick[i] == label_tick)
11218	reg_last_set_invalid[i] = 1;
11219      else
11220	reg_last_set_invalid[i] = 0;
11221    }
11222
11223  /* The value being assigned might refer to X (like in "x++;").  In that
11224     case, we must replace it with (clobber (const_int 0)) to prevent
11225     infinite loops.  */
11226  if (value && ! get_last_value_validate (&value, insn,
11227					  reg_last_set_label[regno], 0))
11228    {
11229      value = copy_rtx (value);
11230      if (! get_last_value_validate (&value, insn,
11231				     reg_last_set_label[regno], 1))
11232	value = 0;
11233    }
11234
11235  /* For the main register being modified, update the value, the mode, the
11236     nonzero bits, and the number of sign bit copies.  */
11237
11238  reg_last_set_value[regno] = value;
11239
11240  if (value)
11241    {
11242      enum machine_mode mode = GET_MODE (reg);
11243      subst_low_cuid = INSN_CUID (insn);
11244      reg_last_set_mode[regno] = mode;
11245      if (GET_MODE_CLASS (mode) == MODE_INT
11246	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11247	mode = nonzero_bits_mode;
11248      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11249      reg_last_set_sign_bit_copies[regno]
11250	= num_sign_bit_copies (value, GET_MODE (reg));
11251    }
11252}
11253
11254/* Called via note_stores from record_dead_and_set_regs to handle one
11255   SET or CLOBBER in an insn.  DATA is the instruction in which the
11256   set is occurring.  */
11257
11258static void
11259record_dead_and_set_regs_1 (dest, setter, data)
11260     rtx dest, setter;
11261     void *data;
11262{
11263  rtx record_dead_insn = (rtx) data;
11264
11265  if (GET_CODE (dest) == SUBREG)
11266    dest = SUBREG_REG (dest);
11267
11268  if (GET_CODE (dest) == REG)
11269    {
11270      /* If we are setting the whole register, we know its value.  Otherwise
11271	 show that we don't know the value.  We can handle SUBREG in
11272	 some cases.  */
11273      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11274	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11275      else if (GET_CODE (setter) == SET
11276	       && GET_CODE (SET_DEST (setter)) == SUBREG
11277	       && SUBREG_REG (SET_DEST (setter)) == dest
11278	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11279	       && subreg_lowpart_p (SET_DEST (setter)))
11280	record_value_for_reg (dest, record_dead_insn,
11281			      gen_lowpart_for_combine (GET_MODE (dest),
11282						       SET_SRC (setter)));
11283      else
11284	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11285    }
11286  else if (GET_CODE (dest) == MEM
11287	   /* Ignore pushes, they clobber nothing.  */
11288	   && ! push_operand (dest, GET_MODE (dest)))
11289    mem_last_set = INSN_CUID (record_dead_insn);
11290}
11291
11292/* Update the records of when each REG was most recently set or killed
11293   for the things done by INSN.  This is the last thing done in processing
11294   INSN in the combiner loop.
11295
11296   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11297   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11298   and also the similar information mem_last_set (which insn most recently
11299   modified memory) and last_call_cuid (which insn was the most recent
11300   subroutine call).  */
11301
11302static void
11303record_dead_and_set_regs (insn)
11304     rtx insn;
11305{
11306  rtx link;
11307  unsigned int i;
11308
11309  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11310    {
11311      if (REG_NOTE_KIND (link) == REG_DEAD
11312	  && GET_CODE (XEXP (link, 0)) == REG)
11313	{
11314	  unsigned int regno = REGNO (XEXP (link, 0));
11315	  unsigned int endregno
11316	    = regno + (regno < FIRST_PSEUDO_REGISTER
11317		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11318		       : 1);
11319
11320	  for (i = regno; i < endregno; i++)
11321	    reg_last_death[i] = insn;
11322	}
11323      else if (REG_NOTE_KIND (link) == REG_INC)
11324	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11325    }
11326
11327  if (GET_CODE (insn) == CALL_INSN)
11328    {
11329      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11330	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11331	  {
11332	    reg_last_set_value[i] = 0;
11333	    reg_last_set_mode[i] = 0;
11334	    reg_last_set_nonzero_bits[i] = 0;
11335	    reg_last_set_sign_bit_copies[i] = 0;
11336	    reg_last_death[i] = 0;
11337	  }
11338
11339      last_call_cuid = mem_last_set = INSN_CUID (insn);
11340
11341      /* Don't bother recording what this insn does.  It might set the
11342	 return value register, but we can't combine into a call
11343	 pattern anyway, so there's no point trying (and it may cause
11344	 a crash, if e.g. we wind up asking for last_set_value of a
11345	 SUBREG of the return value register).  */
11346      return;
11347    }
11348
11349  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11350}
11351
11352/* If a SUBREG has the promoted bit set, it is in fact a property of the
11353   register present in the SUBREG, so for each such SUBREG go back and
11354   adjust nonzero and sign bit information of the registers that are
11355   known to have some zero/sign bits set.
11356
11357   This is needed because when combine blows the SUBREGs away, the
11358   information on zero/sign bits is lost and further combines can be
11359   missed because of that.  */
11360
11361static void
11362record_promoted_value (insn, subreg)
11363     rtx insn;
11364     rtx subreg;
11365{
11366  rtx links, set;
11367  unsigned int regno = REGNO (SUBREG_REG (subreg));
11368  enum machine_mode mode = GET_MODE (subreg);
11369
11370  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11371    return;
11372
11373  for (links = LOG_LINKS (insn); links;)
11374    {
11375      insn = XEXP (links, 0);
11376      set = single_set (insn);
11377
11378      if (! set || GET_CODE (SET_DEST (set)) != REG
11379	  || REGNO (SET_DEST (set)) != regno
11380	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11381	{
11382	  links = XEXP (links, 1);
11383	  continue;
11384	}
11385
11386      if (reg_last_set[regno] == insn)
11387	{
11388	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
11389	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11390	}
11391
11392      if (GET_CODE (SET_SRC (set)) == REG)
11393	{
11394	  regno = REGNO (SET_SRC (set));
11395	  links = LOG_LINKS (insn);
11396	}
11397      else
11398	break;
11399    }
11400}
11401
11402/* Scan X for promoted SUBREGs.  For each one found,
11403   note what it implies to the registers used in it.  */
11404
11405static void
11406check_promoted_subreg (insn, x)
11407     rtx insn;
11408     rtx x;
11409{
11410  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11411      && GET_CODE (SUBREG_REG (x)) == REG)
11412    record_promoted_value (insn, x);
11413  else
11414    {
11415      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11416      int i, j;
11417
11418      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11419	switch (format[i])
11420	  {
11421	  case 'e':
11422	    check_promoted_subreg (insn, XEXP (x, i));
11423	    break;
11424	  case 'V':
11425	  case 'E':
11426	    if (XVEC (x, i) != 0)
11427	      for (j = 0; j < XVECLEN (x, i); j++)
11428		check_promoted_subreg (insn, XVECEXP (x, i, j));
11429	    break;
11430	  }
11431    }
11432}
11433
11434/* Utility routine for the following function.  Verify that all the registers
11435   mentioned in *LOC are valid when *LOC was part of a value set when
11436   label_tick == TICK.  Return 0 if some are not.
11437
11438   If REPLACE is non-zero, replace the invalid reference with
11439   (clobber (const_int 0)) and return 1.  This replacement is useful because
11440   we often can get useful information about the form of a value (e.g., if
11441   it was produced by a shift that always produces -1 or 0) even though
11442   we don't know exactly what registers it was produced from.  */
11443
11444static int
11445get_last_value_validate (loc, insn, tick, replace)
11446     rtx *loc;
11447     rtx insn;
11448     int tick;
11449     int replace;
11450{
11451  rtx x = *loc;
11452  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11453  int len = GET_RTX_LENGTH (GET_CODE (x));
11454  int i;
11455
11456  if (GET_CODE (x) == REG)
11457    {
11458      unsigned int regno = REGNO (x);
11459      unsigned int endregno
11460	= regno + (regno < FIRST_PSEUDO_REGISTER
11461		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11462      unsigned int j;
11463
11464      for (j = regno; j < endregno; j++)
11465	if (reg_last_set_invalid[j]
11466	    /* If this is a pseudo-register that was only set once and not
11467	       live at the beginning of the function, it is always valid.  */
11468	    || (! (regno >= FIRST_PSEUDO_REGISTER
11469		   && REG_N_SETS (regno) == 1
11470		   && (! REGNO_REG_SET_P
11471		       (BASIC_BLOCK (0)->global_live_at_start, regno)))
11472		&& reg_last_set_label[j] > tick))
11473	  {
11474	    if (replace)
11475	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11476	    return replace;
11477	  }
11478
11479      return 1;
11480    }
11481  /* If this is a memory reference, make sure that there were
11482     no stores after it that might have clobbered the value.  We don't
11483     have alias info, so we assume any store invalidates it.  */
11484  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11485	   && INSN_CUID (insn) <= mem_last_set)
11486    {
11487      if (replace)
11488	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11489      return replace;
11490    }
11491
11492  for (i = 0; i < len; i++)
11493    if ((fmt[i] == 'e'
11494	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11495	/* Don't bother with these.  They shouldn't occur anyway.  */
11496	|| fmt[i] == 'E')
11497      return 0;
11498
11499  /* If we haven't found a reason for it to be invalid, it is valid.  */
11500  return 1;
11501}
11502
11503/* Get the last value assigned to X, if known.  Some registers
11504   in the value may be replaced with (clobber (const_int 0)) if their value
11505   is known longer known reliably.  */
11506
11507static rtx
11508get_last_value (x)
11509     rtx x;
11510{
11511  unsigned int regno;
11512  rtx value;
11513
11514  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11515     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11516     we cannot predict what values the "extra" bits might have.  */
11517  if (GET_CODE (x) == SUBREG
11518      && subreg_lowpart_p (x)
11519      && (GET_MODE_SIZE (GET_MODE (x))
11520	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11521      && (value = get_last_value (SUBREG_REG (x))) != 0)
11522    return gen_lowpart_for_combine (GET_MODE (x), value);
11523
11524  if (GET_CODE (x) != REG)
11525    return 0;
11526
11527  regno = REGNO (x);
11528  value = reg_last_set_value[regno];
11529
11530  /* If we don't have a value, or if it isn't for this basic block and
11531     it's either a hard register, set more than once, or it's a live
11532     at the beginning of the function, return 0.
11533
11534     Because if it's not live at the beginning of the function then the reg
11535     is always set before being used (is never used without being set).
11536     And, if it's set only once, and it's always set before use, then all
11537     uses must have the same last value, even if it's not from this basic
11538     block.  */
11539
11540  if (value == 0
11541      || (reg_last_set_label[regno] != label_tick
11542	  && (regno < FIRST_PSEUDO_REGISTER
11543	      || REG_N_SETS (regno) != 1
11544	      || (REGNO_REG_SET_P
11545		  (BASIC_BLOCK (0)->global_live_at_start, regno)))))
11546    return 0;
11547
11548  /* If the value was set in a later insn than the ones we are processing,
11549     we can't use it even if the register was only set once.  */
11550  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11551    return 0;
11552
11553  /* If the value has all its registers valid, return it.  */
11554  if (get_last_value_validate (&value, reg_last_set[regno],
11555			       reg_last_set_label[regno], 0))
11556    return value;
11557
11558  /* Otherwise, make a copy and replace any invalid register with
11559     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11560
11561  value = copy_rtx (value);
11562  if (get_last_value_validate (&value, reg_last_set[regno],
11563			       reg_last_set_label[regno], 1))
11564    return value;
11565
11566  return 0;
11567}
11568
11569/* Return nonzero if expression X refers to a REG or to memory
11570   that is set in an instruction more recent than FROM_CUID.  */
11571
11572static int
11573use_crosses_set_p (x, from_cuid)
11574     rtx x;
11575     int from_cuid;
11576{
11577  const char *fmt;
11578  int i;
11579  enum rtx_code code = GET_CODE (x);
11580
11581  if (code == REG)
11582    {
11583      unsigned int regno = REGNO (x);
11584      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11585				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11586
11587#ifdef PUSH_ROUNDING
11588      /* Don't allow uses of the stack pointer to be moved,
11589	 because we don't know whether the move crosses a push insn.  */
11590      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11591	return 1;
11592#endif
11593      for (; regno < endreg; regno++)
11594	if (reg_last_set[regno]
11595	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
11596	  return 1;
11597      return 0;
11598    }
11599
11600  if (code == MEM && mem_last_set > from_cuid)
11601    return 1;
11602
11603  fmt = GET_RTX_FORMAT (code);
11604
11605  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11606    {
11607      if (fmt[i] == 'E')
11608	{
11609	  int j;
11610	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11611	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11612	      return 1;
11613	}
11614      else if (fmt[i] == 'e'
11615	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11616	return 1;
11617    }
11618  return 0;
11619}
11620
11621/* Define three variables used for communication between the following
11622   routines.  */
11623
11624static unsigned int reg_dead_regno, reg_dead_endregno;
11625static int reg_dead_flag;
11626
11627/* Function called via note_stores from reg_dead_at_p.
11628
11629   If DEST is within [reg_dead_regno, reg_dead_endregno), set
11630   reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET.  */
11631
11632static void
11633reg_dead_at_p_1 (dest, x, data)
11634     rtx dest;
11635     rtx x;
11636     void *data ATTRIBUTE_UNUSED;
11637{
11638  unsigned int regno, endregno;
11639
11640  if (GET_CODE (dest) != REG)
11641    return;
11642
11643  regno = REGNO (dest);
11644  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11645		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11646
11647  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11648    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11649}
11650
11651/* Return non-zero if REG is known to be dead at INSN.
11652
11653   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11654   referencing REG, it is dead.  If we hit a SET referencing REG, it is
11655   live.  Otherwise, see if it is live or dead at the start of the basic
11656   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11657   must be assumed to be always live.  */
11658
11659static int
11660reg_dead_at_p (reg, insn)
11661     rtx reg;
11662     rtx insn;
11663{
11664  int block;
11665  unsigned int i;
11666
11667  /* Set variables for reg_dead_at_p_1.  */
11668  reg_dead_regno = REGNO (reg);
11669  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11670					? HARD_REGNO_NREGS (reg_dead_regno,
11671							    GET_MODE (reg))
11672					: 1);
11673
11674  reg_dead_flag = 0;
11675
11676  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
11677  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11678    {
11679      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11680	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11681	  return 0;
11682    }
11683
11684  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11685     beginning of function.  */
11686  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11687       insn = prev_nonnote_insn (insn))
11688    {
11689      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11690      if (reg_dead_flag)
11691	return reg_dead_flag == 1 ? 1 : 0;
11692
11693      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11694	return 1;
11695    }
11696
11697  /* Get the basic block number that we were in.  */
11698  if (insn == 0)
11699    block = 0;
11700  else
11701    {
11702      for (block = 0; block < n_basic_blocks; block++)
11703	if (insn == BLOCK_HEAD (block))
11704	  break;
11705
11706      if (block == n_basic_blocks)
11707	return 0;
11708    }
11709
11710  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11711    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11712      return 0;
11713
11714  return 1;
11715}
11716
11717/* Note hard registers in X that are used.  This code is similar to
11718   that in flow.c, but much simpler since we don't care about pseudos.  */
11719
11720static void
11721mark_used_regs_combine (x)
11722     rtx x;
11723{
11724  RTX_CODE code = GET_CODE (x);
11725  unsigned int regno;
11726  int i;
11727
11728  switch (code)
11729    {
11730    case LABEL_REF:
11731    case SYMBOL_REF:
11732    case CONST_INT:
11733    case CONST:
11734    case CONST_DOUBLE:
11735    case CONST_VECTOR:
11736    case PC:
11737    case ADDR_VEC:
11738    case ADDR_DIFF_VEC:
11739    case ASM_INPUT:
11740#ifdef HAVE_cc0
11741    /* CC0 must die in the insn after it is set, so we don't need to take
11742       special note of it here.  */
11743    case CC0:
11744#endif
11745      return;
11746
11747    case CLOBBER:
11748      /* If we are clobbering a MEM, mark any hard registers inside the
11749	 address as used.  */
11750      if (GET_CODE (XEXP (x, 0)) == MEM)
11751	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11752      return;
11753
11754    case REG:
11755      regno = REGNO (x);
11756      /* A hard reg in a wide mode may really be multiple registers.
11757	 If so, mark all of them just like the first.  */
11758      if (regno < FIRST_PSEUDO_REGISTER)
11759	{
11760	  unsigned int endregno, r;
11761
11762	  /* None of this applies to the stack, frame or arg pointers */
11763	  if (regno == STACK_POINTER_REGNUM
11764#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11765	      || regno == HARD_FRAME_POINTER_REGNUM
11766#endif
11767#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11768	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11769#endif
11770	      || regno == FRAME_POINTER_REGNUM)
11771	    return;
11772
11773	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11774	  for (r = regno; r < endregno; r++)
11775	    SET_HARD_REG_BIT (newpat_used_regs, r);
11776	}
11777      return;
11778
11779    case SET:
11780      {
11781	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11782	   the address.  */
11783	rtx testreg = SET_DEST (x);
11784
11785	while (GET_CODE (testreg) == SUBREG
11786	       || GET_CODE (testreg) == ZERO_EXTRACT
11787	       || GET_CODE (testreg) == SIGN_EXTRACT
11788	       || GET_CODE (testreg) == STRICT_LOW_PART)
11789	  testreg = XEXP (testreg, 0);
11790
11791	if (GET_CODE (testreg) == MEM)
11792	  mark_used_regs_combine (XEXP (testreg, 0));
11793
11794	mark_used_regs_combine (SET_SRC (x));
11795      }
11796      return;
11797
11798    default:
11799      break;
11800    }
11801
11802  /* Recursively scan the operands of this expression.  */
11803
11804  {
11805    const char *fmt = GET_RTX_FORMAT (code);
11806
11807    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11808      {
11809	if (fmt[i] == 'e')
11810	  mark_used_regs_combine (XEXP (x, i));
11811	else if (fmt[i] == 'E')
11812	  {
11813	    int j;
11814
11815	    for (j = 0; j < XVECLEN (x, i); j++)
11816	      mark_used_regs_combine (XVECEXP (x, i, j));
11817	  }
11818      }
11819  }
11820}
11821
11822/* Remove register number REGNO from the dead registers list of INSN.
11823
11824   Return the note used to record the death, if there was one.  */
11825
11826rtx
11827remove_death (regno, insn)
11828     unsigned int regno;
11829     rtx insn;
11830{
11831  rtx note = find_regno_note (insn, REG_DEAD, regno);
11832
11833  if (note)
11834    {
11835      REG_N_DEATHS (regno)--;
11836      remove_note (insn, note);
11837    }
11838
11839  return note;
11840}
11841
11842/* For each register (hardware or pseudo) used within expression X, if its
11843   death is in an instruction with cuid between FROM_CUID (inclusive) and
11844   TO_INSN (exclusive), put a REG_DEAD note for that register in the
11845   list headed by PNOTES.
11846
11847   That said, don't move registers killed by maybe_kill_insn.
11848
11849   This is done when X is being merged by combination into TO_INSN.  These
11850   notes will then be distributed as needed.  */
11851
11852static void
11853move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11854     rtx x;
11855     rtx maybe_kill_insn;
11856     int from_cuid;
11857     rtx to_insn;
11858     rtx *pnotes;
11859{
11860  const char *fmt;
11861  int len, i;
11862  enum rtx_code code = GET_CODE (x);
11863
11864  if (code == REG)
11865    {
11866      unsigned int regno = REGNO (x);
11867      rtx where_dead = reg_last_death[regno];
11868      rtx before_dead, after_dead;
11869
11870      /* Don't move the register if it gets killed in between from and to */
11871      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11872	  && ! reg_referenced_p (x, maybe_kill_insn))
11873	return;
11874
11875      /* WHERE_DEAD could be a USE insn made by combine, so first we
11876	 make sure that we have insns with valid INSN_CUID values.  */
11877      before_dead = where_dead;
11878      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11879	before_dead = PREV_INSN (before_dead);
11880
11881      after_dead = where_dead;
11882      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11883	after_dead = NEXT_INSN (after_dead);
11884
11885      if (before_dead && after_dead
11886	  && INSN_CUID (before_dead) >= from_cuid
11887	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11888	      || (where_dead != after_dead
11889		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11890	{
11891	  rtx note = remove_death (regno, where_dead);
11892
11893	  /* It is possible for the call above to return 0.  This can occur
11894	     when reg_last_death points to I2 or I1 that we combined with.
11895	     In that case make a new note.
11896
11897	     We must also check for the case where X is a hard register
11898	     and NOTE is a death note for a range of hard registers
11899	     including X.  In that case, we must put REG_DEAD notes for
11900	     the remaining registers in place of NOTE.  */
11901
11902	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11903	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11904		  > GET_MODE_SIZE (GET_MODE (x))))
11905	    {
11906	      unsigned int deadregno = REGNO (XEXP (note, 0));
11907	      unsigned int deadend
11908		= (deadregno + HARD_REGNO_NREGS (deadregno,
11909						 GET_MODE (XEXP (note, 0))));
11910	      unsigned int ourend
11911		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11912	      unsigned int i;
11913
11914	      for (i = deadregno; i < deadend; i++)
11915		if (i < regno || i >= ourend)
11916		  REG_NOTES (where_dead)
11917		    = gen_rtx_EXPR_LIST (REG_DEAD,
11918					 gen_rtx_REG (reg_raw_mode[i], i),
11919					 REG_NOTES (where_dead));
11920	    }
11921
11922	  /* If we didn't find any note, or if we found a REG_DEAD note that
11923	     covers only part of the given reg, and we have a multi-reg hard
11924	     register, then to be safe we must check for REG_DEAD notes
11925	     for each register other than the first.  They could have
11926	     their own REG_DEAD notes lying around.  */
11927	  else if ((note == 0
11928		    || (note != 0
11929			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11930			    < GET_MODE_SIZE (GET_MODE (x)))))
11931		   && regno < FIRST_PSEUDO_REGISTER
11932		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11933	    {
11934	      unsigned int ourend
11935		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11936	      unsigned int i, offset;
11937	      rtx oldnotes = 0;
11938
11939	      if (note)
11940		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11941	      else
11942		offset = 1;
11943
11944	      for (i = regno + offset; i < ourend; i++)
11945		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11946			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11947	    }
11948
11949	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11950	    {
11951	      XEXP (note, 1) = *pnotes;
11952	      *pnotes = note;
11953	    }
11954	  else
11955	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11956
11957	  REG_N_DEATHS (regno)++;
11958	}
11959
11960      return;
11961    }
11962
11963  else if (GET_CODE (x) == SET)
11964    {
11965      rtx dest = SET_DEST (x);
11966
11967      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11968
11969      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11970	 that accesses one word of a multi-word item, some
11971	 piece of everything register in the expression is used by
11972	 this insn, so remove any old death.  */
11973      /* ??? So why do we test for equality of the sizes?  */
11974
11975      if (GET_CODE (dest) == ZERO_EXTRACT
11976	  || GET_CODE (dest) == STRICT_LOW_PART
11977	  || (GET_CODE (dest) == SUBREG
11978	      && (((GET_MODE_SIZE (GET_MODE (dest))
11979		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11980		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11981		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11982	{
11983	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11984	  return;
11985	}
11986
11987      /* If this is some other SUBREG, we know it replaces the entire
11988	 value, so use that as the destination.  */
11989      if (GET_CODE (dest) == SUBREG)
11990	dest = SUBREG_REG (dest);
11991
11992      /* If this is a MEM, adjust deaths of anything used in the address.
11993	 For a REG (the only other possibility), the entire value is
11994	 being replaced so the old value is not used in this insn.  */
11995
11996      if (GET_CODE (dest) == MEM)
11997	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11998		     to_insn, pnotes);
11999      return;
12000    }
12001
12002  else if (GET_CODE (x) == CLOBBER)
12003    return;
12004
12005  len = GET_RTX_LENGTH (code);
12006  fmt = GET_RTX_FORMAT (code);
12007
12008  for (i = 0; i < len; i++)
12009    {
12010      if (fmt[i] == 'E')
12011	{
12012	  int j;
12013	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12014	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12015			 to_insn, pnotes);
12016	}
12017      else if (fmt[i] == 'e')
12018	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12019    }
12020}
12021
12022/* Return 1 if X is the target of a bit-field assignment in BODY, the
12023   pattern of an insn.  X must be a REG.  */
12024
12025static int
12026reg_bitfield_target_p (x, body)
12027     rtx x;
12028     rtx body;
12029{
12030  int i;
12031
12032  if (GET_CODE (body) == SET)
12033    {
12034      rtx dest = SET_DEST (body);
12035      rtx target;
12036      unsigned int regno, tregno, endregno, endtregno;
12037
12038      if (GET_CODE (dest) == ZERO_EXTRACT)
12039	target = XEXP (dest, 0);
12040      else if (GET_CODE (dest) == STRICT_LOW_PART)
12041	target = SUBREG_REG (XEXP (dest, 0));
12042      else
12043	return 0;
12044
12045      if (GET_CODE (target) == SUBREG)
12046	target = SUBREG_REG (target);
12047
12048      if (GET_CODE (target) != REG)
12049	return 0;
12050
12051      tregno = REGNO (target), regno = REGNO (x);
12052      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12053	return target == x;
12054
12055      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12056      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12057
12058      return endregno > tregno && regno < endtregno;
12059    }
12060
12061  else if (GET_CODE (body) == PARALLEL)
12062    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12063      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12064	return 1;
12065
12066  return 0;
12067}
12068
12069/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12070   as appropriate.  I3 and I2 are the insns resulting from the combination
12071   insns including FROM (I2 may be zero).
12072
12073   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12074   not need REG_DEAD notes because they are being substituted for.  This
12075   saves searching in the most common cases.
12076
12077   Each note in the list is either ignored or placed on some insns, depending
12078   on the type of note.  */
12079
12080static void
12081distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
12082     rtx notes;
12083     rtx from_insn;
12084     rtx i3, i2;
12085     rtx elim_i2, elim_i1;
12086{
12087  rtx note, next_note;
12088  rtx tem;
12089
12090  for (note = notes; note; note = next_note)
12091    {
12092      rtx place = 0, place2 = 0;
12093
12094      /* If this NOTE references a pseudo register, ensure it references
12095	 the latest copy of that register.  */
12096      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12097	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12098	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12099
12100      next_note = XEXP (note, 1);
12101      switch (REG_NOTE_KIND (note))
12102	{
12103	case REG_BR_PROB:
12104	case REG_BR_PRED:
12105	case REG_EXEC_COUNT:
12106	  /* Doesn't matter much where we put this, as long as it's somewhere.
12107	     It is preferable to keep these notes on branches, which is most
12108	     likely to be i3.  */
12109	  place = i3;
12110	  break;
12111
12112	case REG_VTABLE_REF:
12113	  /* ??? Should remain with *a particular* memory load.  Given the
12114	     nature of vtable data, the last insn seems relatively safe.  */
12115	  place = i3;
12116	  break;
12117
12118	case REG_NON_LOCAL_GOTO:
12119	  if (GET_CODE (i3) == JUMP_INSN)
12120	    place = i3;
12121	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
12122	    place = i2;
12123	  else
12124	    abort ();
12125	  break;
12126
12127	case REG_EH_REGION:
12128	  /* These notes must remain with the call or trapping instruction.  */
12129	  if (GET_CODE (i3) == CALL_INSN)
12130	    place = i3;
12131	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12132	    place = i2;
12133	  else if (flag_non_call_exceptions)
12134	    {
12135	      if (may_trap_p (i3))
12136		place = i3;
12137	      else if (i2 && may_trap_p (i2))
12138		place = i2;
12139	      /* ??? Otherwise assume we've combined things such that we
12140		 can now prove that the instructions can't trap.  Drop the
12141		 note in this case.  */
12142	    }
12143	  else
12144	    abort ();
12145	  break;
12146
12147	case REG_NORETURN:
12148	case REG_SETJMP:
12149	  /* These notes must remain with the call.  It should not be
12150	     possible for both I2 and I3 to be a call.  */
12151	  if (GET_CODE (i3) == CALL_INSN)
12152	    place = i3;
12153	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12154	    place = i2;
12155	  else
12156	    abort ();
12157	  break;
12158
12159	case REG_UNUSED:
12160	  /* Any clobbers for i3 may still exist, and so we must process
12161	     REG_UNUSED notes from that insn.
12162
12163	     Any clobbers from i2 or i1 can only exist if they were added by
12164	     recog_for_combine.  In that case, recog_for_combine created the
12165	     necessary REG_UNUSED notes.  Trying to keep any original
12166	     REG_UNUSED notes from these insns can cause incorrect output
12167	     if it is for the same register as the original i3 dest.
12168	     In that case, we will notice that the register is set in i3,
12169	     and then add a REG_UNUSED note for the destination of i3, which
12170	     is wrong.  However, it is possible to have REG_UNUSED notes from
12171	     i2 or i1 for register which were both used and clobbered, so
12172	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12173	     notes.  */
12174
12175	  /* If this register is set or clobbered in I3, put the note there
12176	     unless there is one already.  */
12177	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12178	    {
12179	      if (from_insn != i3)
12180		break;
12181
12182	      if (! (GET_CODE (XEXP (note, 0)) == REG
12183		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12184		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12185		place = i3;
12186	    }
12187	  /* Otherwise, if this register is used by I3, then this register
12188	     now dies here, so we must put a REG_DEAD note here unless there
12189	     is one already.  */
12190	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12191		   && ! (GET_CODE (XEXP (note, 0)) == REG
12192			 ? find_regno_note (i3, REG_DEAD,
12193					    REGNO (XEXP (note, 0)))
12194			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12195	    {
12196	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12197	      place = i3;
12198	    }
12199	  break;
12200
12201	case REG_EQUAL:
12202	case REG_EQUIV:
12203	case REG_NOALIAS:
12204	  /* These notes say something about results of an insn.  We can
12205	     only support them if they used to be on I3 in which case they
12206	     remain on I3.  Otherwise they are ignored.
12207
12208	     If the note refers to an expression that is not a constant, we
12209	     must also ignore the note since we cannot tell whether the
12210	     equivalence is still true.  It might be possible to do
12211	     slightly better than this (we only have a problem if I2DEST
12212	     or I1DEST is present in the expression), but it doesn't
12213	     seem worth the trouble.  */
12214
12215	  if (from_insn == i3
12216	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12217	    place = i3;
12218	  break;
12219
12220	case REG_INC:
12221	case REG_NO_CONFLICT:
12222	  /* These notes say something about how a register is used.  They must
12223	     be present on any use of the register in I2 or I3.  */
12224	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12225	    place = i3;
12226
12227	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12228	    {
12229	      if (place)
12230		place2 = i2;
12231	      else
12232		place = i2;
12233	    }
12234	  break;
12235
12236	case REG_LABEL:
12237	  /* This can show up in several ways -- either directly in the
12238	     pattern, or hidden off in the constant pool with (or without?)
12239	     a REG_EQUAL note.  */
12240	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12241	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12242	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12243		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12244		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12245	    place = i3;
12246
12247	  if (i2
12248	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12249		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12250		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12251		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12252	    {
12253	      if (place)
12254		place2 = i2;
12255	      else
12256		place = i2;
12257	    }
12258
12259	  /* Don't attach REG_LABEL note to a JUMP_INSN which has
12260	     JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
12261	  if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12262	    {
12263	      if (JUMP_LABEL (place) != XEXP (note, 0))
12264		abort ();
12265	      if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12266		LABEL_NUSES (JUMP_LABEL (place))--;
12267	      place = 0;
12268	    }
12269	  if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12270	    {
12271	      if (JUMP_LABEL (place2) != XEXP (note, 0))
12272		abort ();
12273	      if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12274		LABEL_NUSES (JUMP_LABEL (place2))--;
12275	      place2 = 0;
12276	    }
12277	  break;
12278
12279	case REG_NONNEG:
12280	case REG_WAS_0:
12281	  /* These notes say something about the value of a register prior
12282	     to the execution of an insn.  It is too much trouble to see
12283	     if the note is still correct in all situations.  It is better
12284	     to simply delete it.  */
12285	  break;
12286
12287	case REG_RETVAL:
12288	  /* If the insn previously containing this note still exists,
12289	     put it back where it was.  Otherwise move it to the previous
12290	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12291	  if (GET_CODE (from_insn) != NOTE)
12292	    place = from_insn;
12293	  else
12294	    {
12295	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12296	      place = prev_real_insn (from_insn);
12297	      if (tem && place)
12298		XEXP (tem, 0) = place;
12299	      /* If we're deleting the last remaining instruction of a
12300		 libcall sequence, don't add the notes.  */
12301	      else if (XEXP (note, 0) == from_insn)
12302		tem = place = 0;
12303	    }
12304	  break;
12305
12306	case REG_LIBCALL:
12307	  /* This is handled similarly to REG_RETVAL.  */
12308	  if (GET_CODE (from_insn) != NOTE)
12309	    place = from_insn;
12310	  else
12311	    {
12312	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12313	      place = next_real_insn (from_insn);
12314	      if (tem && place)
12315		XEXP (tem, 0) = place;
12316	      /* If we're deleting the last remaining instruction of a
12317		 libcall sequence, don't add the notes.  */
12318	      else if (XEXP (note, 0) == from_insn)
12319		tem = place = 0;
12320	    }
12321	  break;
12322
12323	case REG_DEAD:
12324	  /* If the register is used as an input in I3, it dies there.
12325	     Similarly for I2, if it is non-zero and adjacent to I3.
12326
12327	     If the register is not used as an input in either I3 or I2
12328	     and it is not one of the registers we were supposed to eliminate,
12329	     there are two possibilities.  We might have a non-adjacent I2
12330	     or we might have somehow eliminated an additional register
12331	     from a computation.  For example, we might have had A & B where
12332	     we discover that B will always be zero.  In this case we will
12333	     eliminate the reference to A.
12334
12335	     In both cases, we must search to see if we can find a previous
12336	     use of A and put the death note there.  */
12337
12338	  if (from_insn
12339	      && GET_CODE (from_insn) == CALL_INSN
12340	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12341	    place = from_insn;
12342	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12343	    place = i3;
12344	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
12345		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12346	    place = i2;
12347
12348	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
12349	      || rtx_equal_p (XEXP (note, 0), elim_i1))
12350	    break;
12351
12352	  if (place == 0)
12353	    {
12354	      basic_block bb = BASIC_BLOCK (this_basic_block);
12355
12356	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12357		{
12358		  if (! INSN_P (tem))
12359		    {
12360		      if (tem == bb->head)
12361			break;
12362		      continue;
12363		    }
12364
12365		  /* If the register is being set at TEM, see if that is all
12366		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12367		     into a REG_UNUSED note instead.  */
12368		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12369		    {
12370		      rtx set = single_set (tem);
12371		      rtx inner_dest = 0;
12372#ifdef HAVE_cc0
12373		      rtx cc0_setter = NULL_RTX;
12374#endif
12375
12376		      if (set != 0)
12377			for (inner_dest = SET_DEST (set);
12378			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12379			      || GET_CODE (inner_dest) == SUBREG
12380			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12381			     inner_dest = XEXP (inner_dest, 0))
12382			  ;
12383
12384		      /* Verify that it was the set, and not a clobber that
12385			 modified the register.
12386
12387			 CC0 targets must be careful to maintain setter/user
12388			 pairs.  If we cannot delete the setter due to side
12389			 effects, mark the user with an UNUSED note instead
12390			 of deleting it.  */
12391
12392		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12393			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12394#ifdef HAVE_cc0
12395			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12396			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12397				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12398#endif
12399			  )
12400			{
12401			  /* Move the notes and links of TEM elsewhere.
12402			     This might delete other dead insns recursively.
12403			     First set the pattern to something that won't use
12404			     any register.  */
12405
12406			  PATTERN (tem) = pc_rtx;
12407
12408			  distribute_notes (REG_NOTES (tem), tem, tem,
12409					    NULL_RTX, NULL_RTX, NULL_RTX);
12410			  distribute_links (LOG_LINKS (tem));
12411
12412			  PUT_CODE (tem, NOTE);
12413			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12414			  NOTE_SOURCE_FILE (tem) = 0;
12415
12416#ifdef HAVE_cc0
12417			  /* Delete the setter too.  */
12418			  if (cc0_setter)
12419			    {
12420			      PATTERN (cc0_setter) = pc_rtx;
12421
12422			      distribute_notes (REG_NOTES (cc0_setter),
12423						cc0_setter, cc0_setter,
12424						NULL_RTX, NULL_RTX, NULL_RTX);
12425			      distribute_links (LOG_LINKS (cc0_setter));
12426
12427			      PUT_CODE (cc0_setter, NOTE);
12428			      NOTE_LINE_NUMBER (cc0_setter)
12429				= NOTE_INSN_DELETED;
12430			      NOTE_SOURCE_FILE (cc0_setter) = 0;
12431			    }
12432#endif
12433			}
12434		      /* If the register is both set and used here, put the
12435			 REG_DEAD note here, but place a REG_UNUSED note
12436			 here too unless there already is one.  */
12437		      else if (reg_referenced_p (XEXP (note, 0),
12438						 PATTERN (tem)))
12439			{
12440			  place = tem;
12441
12442			  if (! find_regno_note (tem, REG_UNUSED,
12443						 REGNO (XEXP (note, 0))))
12444			    REG_NOTES (tem)
12445			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12446						   REG_NOTES (tem));
12447			}
12448		      else
12449			{
12450			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12451
12452			  /*  If there isn't already a REG_UNUSED note, put one
12453			      here.  */
12454			  if (! find_regno_note (tem, REG_UNUSED,
12455						 REGNO (XEXP (note, 0))))
12456			    place = tem;
12457			  break;
12458			}
12459		    }
12460		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12461			   || (GET_CODE (tem) == CALL_INSN
12462			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12463		    {
12464		      place = tem;
12465
12466		      /* If we are doing a 3->2 combination, and we have a
12467			 register which formerly died in i3 and was not used
12468			 by i2, which now no longer dies in i3 and is used in
12469			 i2 but does not die in i2, and place is between i2
12470			 and i3, then we may need to move a link from place to
12471			 i2.  */
12472		      if (i2 && INSN_UID (place) <= max_uid_cuid
12473			  && INSN_CUID (place) > INSN_CUID (i2)
12474			  && from_insn
12475			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12476			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12477			{
12478			  rtx links = LOG_LINKS (place);
12479			  LOG_LINKS (place) = 0;
12480			  distribute_links (links);
12481			}
12482		      break;
12483		    }
12484
12485		  if (tem == bb->head)
12486		    break;
12487		}
12488
12489	      /* We haven't found an insn for the death note and it
12490		 is still a REG_DEAD note, but we have hit the beginning
12491		 of the block.  If the existing life info says the reg
12492		 was dead, there's nothing left to do.  Otherwise, we'll
12493		 need to do a global life update after combine.  */
12494	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12495		  && REGNO_REG_SET_P (bb->global_live_at_start,
12496				      REGNO (XEXP (note, 0))))
12497		{
12498		  SET_BIT (refresh_blocks, this_basic_block);
12499		  need_refresh = 1;
12500		}
12501	    }
12502
12503	  /* If the register is set or already dead at PLACE, we needn't do
12504	     anything with this note if it is still a REG_DEAD note.
12505	     We can here if it is set at all, not if is it totally replace,
12506	     which is what `dead_or_set_p' checks, so also check for it being
12507	     set partially.  */
12508
12509	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12510	    {
12511	      unsigned int regno = REGNO (XEXP (note, 0));
12512
12513	      /* Similarly, if the instruction on which we want to place
12514		 the note is a noop, we'll need do a global live update
12515		 after we remove them in delete_noop_moves.  */
12516	      if (noop_move_p (place))
12517		{
12518		  SET_BIT (refresh_blocks, this_basic_block);
12519		  need_refresh = 1;
12520		}
12521
12522	      if (dead_or_set_p (place, XEXP (note, 0))
12523		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12524		{
12525		  /* Unless the register previously died in PLACE, clear
12526		     reg_last_death.  [I no longer understand why this is
12527		     being done.] */
12528		  if (reg_last_death[regno] != place)
12529		    reg_last_death[regno] = 0;
12530		  place = 0;
12531		}
12532	      else
12533		reg_last_death[regno] = place;
12534
12535	      /* If this is a death note for a hard reg that is occupying
12536		 multiple registers, ensure that we are still using all
12537		 parts of the object.  If we find a piece of the object
12538		 that is unused, we must arrange for an appropriate REG_DEAD
12539		 note to be added for it.  However, we can't just emit a USE
12540		 and tag the note to it, since the register might actually
12541		 be dead; so we recourse, and the recursive call then finds
12542		 the previous insn that used this register.  */
12543
12544	      if (place && regno < FIRST_PSEUDO_REGISTER
12545		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
12546		{
12547		  unsigned int endregno
12548		    = regno + HARD_REGNO_NREGS (regno,
12549						GET_MODE (XEXP (note, 0)));
12550		  int all_used = 1;
12551		  unsigned int i;
12552
12553		  for (i = regno; i < endregno; i++)
12554		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
12555			 && ! find_regno_fusage (place, USE, i))
12556			|| dead_or_set_regno_p (place, i))
12557		      all_used = 0;
12558
12559		  if (! all_used)
12560		    {
12561		      /* Put only REG_DEAD notes for pieces that are
12562			 not already dead or set.  */
12563
12564		      for (i = regno; i < endregno;
12565			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
12566			{
12567			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
12568			  basic_block bb = BASIC_BLOCK (this_basic_block);
12569
12570			  if (! dead_or_set_p (place, piece)
12571			      && ! reg_bitfield_target_p (piece,
12572							  PATTERN (place)))
12573			    {
12574			      rtx new_note
12575				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
12576
12577			      distribute_notes (new_note, place, place,
12578						NULL_RTX, NULL_RTX, NULL_RTX);
12579			    }
12580			  else if (! refers_to_regno_p (i, i + 1,
12581							PATTERN (place), 0)
12582				   && ! find_regno_fusage (place, USE, i))
12583			    for (tem = PREV_INSN (place); ;
12584				 tem = PREV_INSN (tem))
12585			      {
12586				if (! INSN_P (tem))
12587				  {
12588				    if (tem == bb->head)
12589				      {
12590					SET_BIT (refresh_blocks,
12591						 this_basic_block);
12592					need_refresh = 1;
12593					break;
12594				      }
12595				    continue;
12596				  }
12597				if (dead_or_set_p (tem, piece)
12598				    || reg_bitfield_target_p (piece,
12599							      PATTERN (tem)))
12600				  {
12601				    REG_NOTES (tem)
12602				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
12603							   REG_NOTES (tem));
12604				    break;
12605				  }
12606			      }
12607
12608			}
12609
12610		      place = 0;
12611		    }
12612		}
12613	    }
12614	  break;
12615
12616	default:
12617	  /* Any other notes should not be present at this point in the
12618	     compilation.  */
12619	  abort ();
12620	}
12621
12622      if (place)
12623	{
12624	  XEXP (note, 1) = REG_NOTES (place);
12625	  REG_NOTES (place) = note;
12626	}
12627      else if ((REG_NOTE_KIND (note) == REG_DEAD
12628		|| REG_NOTE_KIND (note) == REG_UNUSED)
12629	       && GET_CODE (XEXP (note, 0)) == REG)
12630	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
12631
12632      if (place2)
12633	{
12634	  if ((REG_NOTE_KIND (note) == REG_DEAD
12635	       || REG_NOTE_KIND (note) == REG_UNUSED)
12636	      && GET_CODE (XEXP (note, 0)) == REG)
12637	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
12638
12639	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
12640					       REG_NOTE_KIND (note),
12641					       XEXP (note, 0),
12642					       REG_NOTES (place2));
12643	}
12644    }
12645}
12646
12647/* Similarly to above, distribute the LOG_LINKS that used to be present on
12648   I3, I2, and I1 to new locations.  This is also called in one case to
12649   add a link pointing at I3 when I3's destination is changed.  */
12650
12651static void
12652distribute_links (links)
12653     rtx links;
12654{
12655  rtx link, next_link;
12656
12657  for (link = links; link; link = next_link)
12658    {
12659      rtx place = 0;
12660      rtx insn;
12661      rtx set, reg;
12662
12663      next_link = XEXP (link, 1);
12664
12665      /* If the insn that this link points to is a NOTE or isn't a single
12666	 set, ignore it.  In the latter case, it isn't clear what we
12667	 can do other than ignore the link, since we can't tell which
12668	 register it was for.  Such links wouldn't be used by combine
12669	 anyway.
12670
12671	 It is not possible for the destination of the target of the link to
12672	 have been changed by combine.  The only potential of this is if we
12673	 replace I3, I2, and I1 by I3 and I2.  But in that case the
12674	 destination of I2 also remains unchanged.  */
12675
12676      if (GET_CODE (XEXP (link, 0)) == NOTE
12677	  || (set = single_set (XEXP (link, 0))) == 0)
12678	continue;
12679
12680      reg = SET_DEST (set);
12681      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
12682	     || GET_CODE (reg) == SIGN_EXTRACT
12683	     || GET_CODE (reg) == STRICT_LOW_PART)
12684	reg = XEXP (reg, 0);
12685
12686      /* A LOG_LINK is defined as being placed on the first insn that uses
12687	 a register and points to the insn that sets the register.  Start
12688	 searching at the next insn after the target of the link and stop
12689	 when we reach a set of the register or the end of the basic block.
12690
12691	 Note that this correctly handles the link that used to point from
12692	 I3 to I2.  Also note that not much searching is typically done here
12693	 since most links don't point very far away.  */
12694
12695      for (insn = NEXT_INSN (XEXP (link, 0));
12696	   (insn && (this_basic_block == n_basic_blocks - 1
12697		     || BLOCK_HEAD (this_basic_block + 1) != insn));
12698	   insn = NEXT_INSN (insn))
12699	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
12700	  {
12701	    if (reg_referenced_p (reg, PATTERN (insn)))
12702	      place = insn;
12703	    break;
12704	  }
12705	else if (GET_CODE (insn) == CALL_INSN
12706		 && find_reg_fusage (insn, USE, reg))
12707	  {
12708	    place = insn;
12709	    break;
12710	  }
12711
12712      /* If we found a place to put the link, place it there unless there
12713	 is already a link to the same insn as LINK at that point.  */
12714
12715      if (place)
12716	{
12717	  rtx link2;
12718
12719	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
12720	    if (XEXP (link2, 0) == XEXP (link, 0))
12721	      break;
12722
12723	  if (link2 == 0)
12724	    {
12725	      XEXP (link, 1) = LOG_LINKS (place);
12726	      LOG_LINKS (place) = link;
12727
12728	      /* Set added_links_insn to the earliest insn we added a
12729		 link to.  */
12730	      if (added_links_insn == 0
12731		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
12732		added_links_insn = place;
12733	    }
12734	}
12735    }
12736}
12737
12738/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
12739
12740static int
12741insn_cuid (insn)
12742     rtx insn;
12743{
12744  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
12745	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
12746    insn = NEXT_INSN (insn);
12747
12748  if (INSN_UID (insn) > max_uid_cuid)
12749    abort ();
12750
12751  return INSN_CUID (insn);
12752}
12753
12754void
12755dump_combine_stats (file)
12756     FILE *file;
12757{
12758  fnotice
12759    (file,
12760     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
12761     combine_attempts, combine_merges, combine_extras, combine_successes);
12762}
12763
12764void
12765dump_combine_total_stats (file)
12766     FILE *file;
12767{
12768  fnotice
12769    (file,
12770     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12771     total_attempts, total_merges, total_extras, total_successes);
12772}
12773