/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS has no links for uses of CC0.  None are needed, because
   the insn that sets CC0 is always immediately before the insn that
   tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn that
   explicitly uses CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by substituting the value that each earlier
   insn computes for the register it sets into the expressions in
   the later insns that refer to that register.  If the result is a valid
   insn for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
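
   For example (an illustrative combination; whether it is accepted
   depends on the machine description), given

	(set (reg 99) (plus (reg 98) (const_int 4)))
	(set (reg 100) (mult (reg 99) (reg 97)))

   where the second insn's LOG_LINKS point at the first and reg 99
   is not used afterward, we try to rewrite the second insn as

	(set (reg 100) (mult (plus (reg 98) (const_int 4)) (reg 97)))

   and delete the first.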

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

#ifndef SHIFT_COUNT_TRUNCATED
#define SHIFT_COUNT_TRUNCATED 0
#endif

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;


/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   Renumbering the uids themselves, as we used to do, proved to be a
   bad idea because it makes it hard to compare the dumps produced by
   earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
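
/* For example (illustrative widths): with BITS_PER_WORD
   == HOST_BITS_PER_WIDE_INT == 64, writing VAL << 64 would be undefined,
   whereas (VAL << 63) << 1 is well defined and yields 0, which is the
   intended result modulo 2**64.  */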

#define nonzero_bits(X, M) \
  cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0)

#define num_sign_bit_copies(X, M) \
  cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0)
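
/* For example (illustrative), nonzero_bits ((and:SI X (const_int 7)),
   SImode) is at most 7: the result is a conservative mask of the bits
   that might be nonzero, and the AND clears all but the low three.  */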

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.
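
   As an illustrative instance: if the recorded information shows that
   only the low eight bits of (reg 100) can be nonzero, then a later
   (and:SI (reg 100) (const_int 255)) changes nothing and can be
   simplified to (reg 100) itself.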

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
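
/* For example (a sketch of the bookkeeping): suppose the value recorded
   for register 7 is (plus (reg 5) (const_int 1)); recording it sets
   reg_last_set_table_tick[5] to label_tick.  If register 5 is then
   assigned again before the next label, reg_last_set_invalid[5] becomes
   nonzero, so the stale expression recorded for register 7, which
   mentions register 5, will no longer be used.  */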

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set nonzero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while they are being computed and after combine has completed;
   keeping it zero during the computation prevents propagating values based
   on previously set values, which can be incorrect if a variable is
   modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};
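
/* Undoing one record is then just a store back through whichever union
   member matches is_int -- a sketch of what undo_all does for each
   entry on the list:

     if (undo->is_int)
       *undo->where.i = undo->old_contents.i;
     else
       *undo->where.r = undo->old_contents.r;  */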

/* Record a list of changes to be undone; undos chains the currently
   recorded changes, and frees chains records available for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((int *, int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p	PARAMS ((rtx));
static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PARAMS ((rtx));
static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv	PARAMS ((rtx));
static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
static void undo_all		PARAMS ((void));
static void undo_commit		PARAMS ((void));
static rtx *find_split_point	PARAMS ((rtx *, rtx));
static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PARAMS ((rtx));
static rtx simplify_set		PARAMS ((rtx));
static rtx simplify_logical	PARAMS ((rtx, int));
static rtx expand_compound_operation  PARAMS ((rtx));
static rtx expand_field_assignment  PARAMS ((rtx));
static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
					 rtx, unsigned HOST_WIDE_INT, int,
					 int, int));
static rtx extract_left_shift	PARAMS ((rtx, int));
static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *));
static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
					 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment  PARAMS ((rtx));
static rtx apply_distributive_law  PARAMS ((rtx));
static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
					    unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT cached_nonzero_bits
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits1
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode,
					 unsigned HOST_WIDE_INT));
static unsigned int cached_num_sign_bit_copies
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode, unsigned int));
static unsigned int num_sign_bit_copies1
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode, unsigned int));
static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
					 enum rtx_code, HOST_WIDE_INT,
					 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
static void adjust_for_new_dest PARAMS ((rtx));

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
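
/* A typical use while simplifying, for illustration:
   SUBST (XEXP (x, 0), new_op) installs NEW_OP as operand 0 of X and
   records the old operand so that undo_all can restore it if the
   resulting insn turns out not to be recognizable.  */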

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     int *into, newval;
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also record any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = this_basic_block->head;
           insn != NEXT_INSN (this_basic_block->end);
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */
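
	  /* For example (hypothetical values): if X has QImode and SRC is
	     (const_int 128), bit 7 of SRC is set, so we would record the
	     sign-extended (const_int -128) instead.  */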

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We check that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
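
/* Illustrative instances: (plus (mult (reg) (const_int 4)) (reg)) does
   not count, since that multiplication is by a power of two, but
   (plus (mult (reg) (reg)) (reg)) and (udiv (reg) (reg)) both do.  */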

static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (insn)
     rtx insn;
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs.  The register
     allocator can usually handle such reg-reg moves by tying.  If we allow
     the combiner to make substitutions of hard regs, we risk aborting in
     reload on machines that have SMALL_REGISTER_CLASSES.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */
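
  /* For example (illustrative), a plain copy such as
     (set (reg:SI 0) (reg:SI 100)) that moves a pseudo into a non-fixed
     hard register is rejected here and left for the register allocator
     to tie.  */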

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)])
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)])))
    return 1;

  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (insn)
     rtx insn;
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
	*loc = XEXP (*loc, 1);
      else
	loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}

1534/* Try to combine the insns I1 and I2 into I3.
1535   Here I1 and I2 appear earlier than I3.
1536   I1 can be zero; then we combine just I2 into I3.
1537
1538   If we are combining three insns and the resulting insn is not recognized,
1539   try splitting it into two insns.  If that happens, I2 and I3 are retained
1540   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
1541   are pseudo-deleted.
1542
1543   Return 0 if the combination does not work.  Then nothing is changed.
1544   If we did the combination, return the insn at which combine should
1545   resume scanning.
1546
1547   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1548   new direct jump instruction.  */
1549
1550static rtx
1551try_combine (i3, i2, i1, new_direct_jump_p)
1552     rtx i3, i2, i1;
1553     int *new_direct_jump_p;
1554{
1555  /* New patterns for I3 and I2, respectively.  */
1556  rtx newpat, newi2pat = 0;
1557  int substed_i2 = 0, substed_i1 = 0;
1558  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
1559  int added_sets_1, added_sets_2;
1560  /* Total number of SETs to put into I3.  */
1561  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
1563  int i2_is_used;
1564  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
1565  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains the destination of I3 if it is used in I3's source, which
     means that the old life of that destination is being killed.  If that
     usage is placed into I2 and not in I3, a REG_DEAD note must be made.  */
1569  rtx i3dest_killed = 0;
1570  /* SET_DEST and SET_SRC of I2 and I1.  */
1571  rtx i2dest, i2src, i1dest = 0, i1src = 0;
1572  /* PATTERN (I2), or a copy of it in certain cases.  */
1573  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1575  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1576  int i1_feeds_i3 = 0;
1577  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1578  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
1582  int have_mult = 0;
1583
1584  int maxreg;
1585  rtx temp;
1586  rtx link;
1587  int i;
1588
1589  /* Exit early if one of the insns involved can't be used for
1590     combinations.  */
1591  if (cant_combine_insn_p (i3)
1592      || cant_combine_insn_p (i2)
1593      || (i1 && cant_combine_insn_p (i1))
1594      /* We also can't do anything if I3 has a
1595	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1596	 libcall.  */
1597#if 0
1598      /* ??? This gives worse code, and appears to be unnecessary, since no
1599	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1600      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1601#endif
1602      )
1603    return 0;
1604
1605  combine_attempts++;
1606  undobuf.other_insn = 0;
1607
1608  /* Reset the hard register usage information.  */
1609  CLEAR_HARD_REG_SET (newpat_used_regs);
1610
1611  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1612     code below, set I1 to be the earlier of the two insns.  */
1613  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1614    temp = i1, i1 = i2, i2 = temp;
1615
1616  added_links_insn = 0;
1617
1618  /* First check for one important special-case that the code below will
1619     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1620     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1621     we may be able to replace that destination with the destination of I3.
1622     This occurs in the common code where we compute both a quotient and
1623     remainder into a structure, in which case we want to do the computation
1624     directly into the structure to avoid register-register copies.
1625
     Note that this case handles both multiple sets in I2 and also
     cases where I2's PARALLEL contains a number of CLOBBERs.
1628
1629     We make very conservative checks below and only try to handle the
1630     most common cases of this.  For example, we only handle the case
1631     where I2 and I3 are adjacent to avoid making difficult register
1632     usage tests.  */
1633
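  /* As an illustration (register numbers here are arbitrary), given

	I2: (parallel [(set (reg:SI 70) (div:SI (reg:SI 66) (reg:SI 67)))
		       (set (reg:SI 71) (mod:SI (reg:SI 66) (reg:SI 67)))])
	I3: (set (mem:SI (reg:SI 69)) (reg:SI 70))

     with (reg:SI 70) dying in I3, we substitute the memory destination
     directly into I2, giving

	(parallel [(set (mem:SI (reg:SI 69))
			(div:SI (reg:SI 66) (reg:SI 67)))
		   (set (reg:SI 71) (mod:SI (reg:SI 66) (reg:SI 67)))])

     which becomes the new I3.  */
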
1634  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1635      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1636      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1637      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1638      && GET_CODE (PATTERN (i2)) == PARALLEL
1639      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1640      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1641	 below would need to check what is inside (and reg_overlap_mentioned_p
1642	 doesn't support those codes anyway).  Don't allow those destinations;
1643	 the resulting insn isn't likely to be recognized anyway.  */
1644      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1645      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1646      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1647				    SET_DEST (PATTERN (i3)))
1648      && next_real_insn (i2) == i3)
1649    {
1650      rtx p2 = PATTERN (i2);
1651
1652      /* Make sure that the destination of I3,
1653	 which we are going to substitute into one output of I2,
1654	 is not used within another output of I2.  We must avoid making this:
1655	 (parallel [(set (mem (reg 69)) ...)
1656		    (set (reg 69) ...)])
1657	 which is not well-defined as to order of actions.
1658	 (Besides, reload can't handle output reloads for this.)
1659
1660	 The problem can also happen if the dest of I3 is a memory ref,
1661	 if another dest in I2 is an indirect memory ref.  */
1662      for (i = 0; i < XVECLEN (p2, 0); i++)
1663	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1664	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1665	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1666					SET_DEST (XVECEXP (p2, 0, i))))
1667	  break;
1668
1669      if (i == XVECLEN (p2, 0))
1670	for (i = 0; i < XVECLEN (p2, 0); i++)
1671	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1672	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1673	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1674	    {
1675	      combine_merges++;
1676
1677	      subst_insn = i3;
1678	      subst_low_cuid = INSN_CUID (i2);
1679
1680	      added_sets_2 = added_sets_1 = 0;
1681	      i2dest = SET_SRC (PATTERN (i3));
1682
1683	      /* Replace the dest in I2 with our dest and make the resulting
1684		 insn the new pattern for I3.  Then skip to where we
1685		 validate the pattern.  Everything was set up above.  */
1686	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1687		     SET_DEST (PATTERN (i3)));
1688
1689	      newpat = p2;
1690	      i3_subst_into_i2 = 1;
1691	      goto validate_replacement;
1692	    }
1693    }
1694
1695  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1696     one of those words to another constant, merge them by making a new
1697     constant.  */
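  /* Illustrative example (assuming a 32-bit word; register numbers here
     are arbitrary): if I2 is
	(set (reg:DI 70) (const_int 0))
     and I3 is
	(set (subreg:SI (reg:DI 70) 0) (const_int 5))
     the two are merged into a single set of (reg:DI 70) whose source is
     the combined double-word constant.  */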
1698  if (i1 == 0
1699      && (temp = single_set (i2)) != 0
1700      && (GET_CODE (SET_SRC (temp)) == CONST_INT
1701	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1702      && GET_CODE (SET_DEST (temp)) == REG
1703      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1704      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1705      && GET_CODE (PATTERN (i3)) == SET
1706      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1707      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1708      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1709      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1710      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1711    {
1712      HOST_WIDE_INT lo, hi;
1713
1714      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1715	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1716      else
1717	{
1718	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1719	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1720	}
1721
1722      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1723	{
1724	  /* We don't handle the case of the target word being wider
1725	     than a host wide int.  */
1726	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1727	    abort ();
1728
1729	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1730	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1731		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1732	}
1733      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1734	hi = INTVAL (SET_SRC (PATTERN (i3)));
1735      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1736	{
1737	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1738			     >> (HOST_BITS_PER_WIDE_INT - 1));
1739
1740	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1741		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1742	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1743		 (INTVAL (SET_SRC (PATTERN (i3)))));
1744	  if (hi == sign)
1745	    hi = lo < 0 ? -1 : 0;
1746	}
1747      else
1748	/* We don't handle the case of the higher word not fitting
1749	   entirely in either hi or lo.  */
1750	abort ();
1751
1752      combine_merges++;
1753      subst_insn = i3;
1754      subst_low_cuid = INSN_CUID (i2);
1755      added_sets_2 = added_sets_1 = 0;
1756      i2dest = SET_DEST (temp);
1757
1758      SUBST (SET_SRC (temp),
1759	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1760
1761      newpat = PATTERN (i2);
1762      goto validate_replacement;
1763    }
1764
1765#ifndef HAVE_cc0
1766  /* If we have no I1 and I2 looks like:
1767	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1768		   (set Y OP)])
1769     make up a dummy I1 that is
1770	(set Y OP)
1771     and change I2 to be
1772        (set (reg:CC X) (compare:CC Y (const_int 0)))
1773
1774     (We can ignore any trailing CLOBBERs.)
1775
1776     This undoes a previous combination and allows us to match a branch-and-
1777     decrement insn.  */
1778
1779  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1780      && XVECLEN (PATTERN (i2), 0) >= 2
1781      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1782      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1783	  == MODE_CC)
1784      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1785      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1786      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1787      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1788      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1789		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1790    {
1791      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1792	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1793	  break;
1794
1795      if (i == 1)
1796	{
1797	  /* We make I1 with the same INSN_UID as I2.  This gives it
1798	     the same INSN_CUID for value tracking.  Our fake I1 will
1799	     never appear in the insn stream so giving it the same INSN_UID
1800	     as I2 will not cause a problem.  */
1801
1802	  subst_prev_insn = i1
1803	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1804			    BLOCK_FOR_INSN (i2), INSN_SCOPE (i2),
1805			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1806			    NULL_RTX);
1807
1808	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1809	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1810		 SET_DEST (PATTERN (i1)));
1811	}
1812    }
1813#endif
1814
1815  /* Verify that I2 and I1 are valid for combining.  */
1816  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1817      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1818    {
1819      undo_all ();
1820      return 0;
1821    }
1822
1823  /* Record whether I2DEST is used in I2SRC and similarly for the other
1824     cases.  Knowing this will help in register status updating below.  */
1825  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1826  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1827  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1828
1829  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1830     in I2SRC.  */
1831  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1832
1833  /* Ensure that I3's pattern can be the destination of combines.  */
1834  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1835			  i1 && i2dest_in_i1src && i1_feeds_i3,
1836			  &i3dest_killed))
1837    {
1838      undo_all ();
1839      return 0;
1840    }
1841
1842  /* See if any of the insns is a MULT operation.  Unless one is, we will
1843     reject a combination that is, since it must be slower.  Be conservative
1844     here.  */
1845  if (GET_CODE (i2src) == MULT
1846      || (i1 != 0 && GET_CODE (i1src) == MULT)
1847      || (GET_CODE (PATTERN (i3)) == SET
1848	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1849    have_mult = 1;
1850
1851  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1852     We used to do this EXCEPT in one case: I3 has a post-inc in an
1853     output operand.  However, that exception can give rise to insns like
1854	mov r3,(r3)+
1855     which is a famous insn on the PDP-11 where the value of r3 used as the
1856     source was model-dependent.  Avoid this sort of thing.  */
1857
1858#if 0
1859  if (!(GET_CODE (PATTERN (i3)) == SET
1860	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1861	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1862	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1863	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1864    /* It's not the exception.  */
1865#endif
1866#ifdef AUTO_INC_DEC
1867    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1868      if (REG_NOTE_KIND (link) == REG_INC
1869	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1870	      || (i1 != 0
1871		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1872	{
1873	  undo_all ();
1874	  return 0;
1875	}
1876#endif
1877
1878  /* See if the SETs in I1 or I2 need to be kept around in the merged
1879     instruction: whenever the value set there is still needed past I3.
1880     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1881
1882     For the SET in I1, we have two cases:  If I1 and I2 independently
1883     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1884     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1885     in I1 needs to be kept around unless I1DEST dies or is set in either
1886     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1887     I1DEST.  If so, we know I1 feeds into I2.  */
1888
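  /* For instance (register numbers here are arbitrary), if I2 is
	(set (reg:SI 70) (plus:SI (reg:SI 68) (const_int 1)))
     and (reg:SI 70) is neither dead nor set in I3, its value is still
     needed past I3, so I2's SET must be preserved in the merged pattern
     and ADDED_SETS_2 will be nonzero.  */
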
1889  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1890
1891  added_sets_1
1892    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1893	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1894
1895  /* If the set in I2 needs to be kept around, we must make a copy of
1896     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1897     PATTERN (I2), we are only substituting for the original I1DEST, not into
1898     an already-substituted copy.  This also prevents making self-referential
1899     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1900     I2DEST.  */
1901
1902  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1903	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1904	   : PATTERN (i2));
1905
1906  if (added_sets_2)
1907    i2pat = copy_rtx (i2pat);
1908
1909  combine_merges++;
1910
1911  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1912
1913  maxreg = max_reg_num ();
1914
1915  subst_insn = i3;
1916
1917  /* It is possible that the source of I2 or I1 may be performing an
1918     unneeded operation, such as a ZERO_EXTEND of something that is known
1919     to have the high part zero.  Handle that case by letting subst look at
1920     the innermost one of them.
1921
1922     Another way to do this would be to have a function that tries to
1923     simplify a single insn instead of merging two or more insns.  We don't
1924     do this because of the potential of infinite loops and because
1925     of the potential extra memory required.  However, doing it the way
1926     we are is a bit of a kludge and doesn't catch all cases.
1927
1928     But only do this if -fexpensive-optimizations since it slows things down
1929     and doesn't usually win.  */
1930
1931  if (flag_expensive_optimizations)
1932    {
1933      /* Pass pc_rtx so no substitutions are done, just simplifications.
1934	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
1936      if (i1)
1937	{
1938	  subst_low_cuid = INSN_CUID (i1);
1939	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1940	}
1941      else
1942	{
1943	  subst_low_cuid = INSN_CUID (i2);
1944	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1945	}
1946    }
1947
1948#ifndef HAVE_cc0
1949  /* Many machines that don't use CC0 have insns that can both perform an
1950     arithmetic operation and set the condition code.  These operations will
1951     be represented as a PARALLEL with the first element of the vector
1952     being a COMPARE of an arithmetic operation with the constant zero.
1953     The second element of the vector will set some pseudo to the result
1954     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1955     match such a pattern and so will generate an extra insn.   Here we test
1956     for this case, where both the comparison and the operation result are
1957     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1958     I2SRC.  Later we will make the PARALLEL that contains I2.  */
1959
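  /* Schematically (register numbers here are arbitrary), if I2 is
	(set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
     and I3 is
	(set (reg:CC 24) (compare:CC (reg:SI 70) (const_int 0)))
     with the sum still needed, we substitute I2SRC into the COMPARE,
     giving
	(set (reg:CC 24) (compare:CC (plus:SI (reg:SI 68) (reg:SI 69))
				     (const_int 0)))
     and later build the PARALLEL that also contains I2's SET.  */
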
1960  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1961      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1962      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1963      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1964    {
1965#ifdef EXTRA_CC_MODES
1966      rtx *cc_use;
1967      enum machine_mode compare_mode;
1968#endif
1969
1970      newpat = PATTERN (i3);
1971      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1972
1973      i2_is_used = 1;
1974
1975#ifdef EXTRA_CC_MODES
1976      /* See if a COMPARE with the operand we substituted in should be done
1977	 with the mode that is currently being used.  If not, do the same
1978	 processing we do in `subst' for a SET; namely, if the destination
1979	 is used only once, try to replace it with a register of the proper
1980	 mode and also replace the COMPARE.  */
1981      if (undobuf.other_insn == 0
1982	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1983					&undobuf.other_insn))
1984	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1985					      i2src, const0_rtx))
1986	      != GET_MODE (SET_DEST (newpat))))
1987	{
1988	  unsigned int regno = REGNO (SET_DEST (newpat));
1989	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1990
1991	  if (regno < FIRST_PSEUDO_REGISTER
1992	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1993		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1994	    {
1995	      if (regno >= FIRST_PSEUDO_REGISTER)
1996		SUBST (regno_reg_rtx[regno], new_dest);
1997
1998	      SUBST (SET_DEST (newpat), new_dest);
1999	      SUBST (XEXP (*cc_use, 0), new_dest);
2000	      SUBST (SET_SRC (newpat),
2001		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2002	    }
2003	  else
2004	    undobuf.other_insn = 0;
2005	}
2006#endif
2007    }
2008  else
2009#endif
2010    {
2011      n_occurrences = 0;		/* `subst' counts here */
2012
2013      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2014	 need to make a unique copy of I2SRC each time we substitute it
2015	 to avoid self-referential rtl.  */
2016
2017      subst_low_cuid = INSN_CUID (i2);
2018      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2019		      ! i1_feeds_i3 && i1dest_in_i1src);
2020      substed_i2 = 1;
2021
2022      /* Record whether i2's body now appears within i3's body.  */
2023      i2_is_used = n_occurrences;
2024    }
2025
2026  /* If we already got a failure, don't try to do more.  Otherwise,
2027     try to substitute in I1 if we have it.  */
2028
2029  if (i1 && GET_CODE (newpat) != CLOBBER)
2030    {
2031      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
2033	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
2034
2035      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
2036			      0, (rtx*) 0))
2037	{
2038	  undo_all ();
2039	  return 0;
2040	}
2041
2042      n_occurrences = 0;
2043      subst_low_cuid = INSN_CUID (i1);
2044      newpat = subst (newpat, i1dest, i1src, 0, 0);
2045      substed_i1 = 1;
2046    }
2047
2048  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
2049     to count all the ways that I2SRC and I1SRC can be used.  */
2050  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2051       && i2_is_used + added_sets_2 > 1)
2052      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2053	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2054	      > 1))
2055      /* Fail if we tried to make a new register (we used to abort, but there's
2056	 really no reason to).  */
2057      || max_reg_num () != maxreg
2058      /* Fail if we couldn't do something and have a CLOBBER.  */
2059      || GET_CODE (newpat) == CLOBBER
2060      /* Fail if this new pattern is a MULT and we didn't have one before
2061	 at the outer level.  */
2062      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2063	  && ! have_mult))
2064    {
2065      undo_all ();
2066      return 0;
2067    }
2068
2069  /* If the actions of the earlier insns must be kept
2070     in addition to substituting them into the latest one,
2071     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */
2073
2074  if (added_sets_1 || added_sets_2)
2075    {
2076      combine_extras++;
2077
2078      if (GET_CODE (newpat) == PARALLEL)
2079	{
2080	  rtvec old = XVEC (newpat, 0);
2081	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2082	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2083	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2084		  sizeof (old->elem[0]) * old->num_elem);
2085	}
2086      else
2087	{
2088	  rtx old = newpat;
2089	  total_sets = 1 + added_sets_1 + added_sets_2;
2090	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2091	  XVECEXP (newpat, 0, 0) = old;
2092	}
2093
2094      if (added_sets_1)
2095	XVECEXP (newpat, 0, --total_sets)
2096	  = (GET_CODE (PATTERN (i1)) == PARALLEL
2097	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2098
2099      if (added_sets_2)
2100	{
2101	  /* If there is no I1, use I2's body as is.  We used to also not do
2102	     the subst call below if I2 was substituted into I3,
2103	     but that could lose a simplification.  */
2104	  if (i1 == 0)
2105	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2106	  else
2107	    /* See comment where i2pat is assigned.  */
2108	    XVECEXP (newpat, 0, --total_sets)
2109	      = subst (i2pat, i1dest, i1src, 0, 0);
2110	}
2111    }
2112
2113  /* We come here when we are replacing a destination in I2 with the
2114     destination of I3.  */
2115 validate_replacement:
2116
2117  /* Note which hard regs this insn has as inputs.  */
2118  mark_used_regs_combine (newpat);
2119
2120  /* Is the result of combination a valid instruction?  */
2121  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2122
2123  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2124     the second SET's destination is a register that is unused.  In that case,
2125     we just need the first SET.   This can occur when simplifying a divmod
2126     insn.  We *must* test for this case here because the code below that
2127     splits two independent SETs doesn't handle this case correctly when it
2128     updates the register status.  Also check the case where the first
2129     SET's destination is unused.  That would not cause incorrect code, but
2130     does cause an unneeded insn to remain.  */
2131
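  /* For example (register numbers here are arbitrary), if NEWPAT is
	(parallel [(set (reg:SI 70) (div:SI (reg:SI 66) (reg:SI 67)))
		   (set (reg:SI 71) (mod:SI (reg:SI 66) (reg:SI 67)))])
     and I3 has a REG_UNUSED note for (reg:SI 71), the second SET is
     dropped and only the DIV is kept.  */
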
2132  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2133      && XVECLEN (newpat, 0) == 2
2134      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2135      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2136      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2137      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2138      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2139      && asm_noperands (newpat) < 0)
2140    {
2141      newpat = XVECEXP (newpat, 0, 0);
2142      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2143    }
2144
2145  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2146	   && XVECLEN (newpat, 0) == 2
2147	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2148	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2149	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2150	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2151	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2152	   && asm_noperands (newpat) < 0)
2153    {
2154      newpat = XVECEXP (newpat, 0, 1);
2155      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2156
2157      if (insn_code_number >= 0)
2158	{
2159	  /* If we will be able to accept this, we have made a change to the
2160	     destination of I3.  This requires us to do a few adjustments.  */
2161	  PATTERN (i3) = newpat;
2162	  adjust_for_new_dest (i3);
2163	}
2164    }
2165
2166  /* If we were combining three insns and the result is a simple SET
2167     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2168     insns.  There are two ways to do this.  It can be split using a
2169     machine-specific method (like when you have an addition of a large
2170     constant) or by combine in the function find_split_point.  */
2171
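  /* As a sketch of the machine-specific case (constants and register
     numbers here are arbitrary), an unrecognizable
	(set (reg:SI 70) (plus:SI (reg:SI 68) (const_int 123456)))
     might be split by the MD file into
	(set (reg:SI 69) (const_int 123456))
	(set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
     which become the new I2 and I3.  */
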
2172  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2173      && asm_noperands (newpat) < 0)
2174    {
2175      rtx m_split, *split;
2176      rtx ni2dest = i2dest;
2177
2178      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2179	 use I2DEST as a scratch register will help.  In the latter case,
2180	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
2181
2182      m_split = split_insns (newpat, i3);
2183
2184      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2185	 inputs of NEWPAT.  */
2186
2187      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2188	 possible to try that as a scratch reg.  This would require adding
2189	 more code to make it work though.  */
2190
2191      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2192	{
2193	  /* If I2DEST is a hard register or the only use of a pseudo,
2194	     we can change its mode.  */
2195	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2196	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
2197	      && GET_CODE (i2dest) == REG
2198	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2199		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2200		      && ! REG_USERVAR_P (i2dest))))
2201	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2202				   REGNO (i2dest));
2203
2204	  m_split = split_insns (gen_rtx_PARALLEL
2205				 (VOIDmode,
2206				  gen_rtvec (2, newpat,
2207					     gen_rtx_CLOBBER (VOIDmode,
2208							      ni2dest))),
2209				 i3);
2210	  /* If the split with the mode-changed register didn't work, try
2211	     the original register.  */
2212	  if (! m_split && ni2dest != i2dest)
2213	    {
2214	      ni2dest = i2dest;
2215	      m_split = split_insns (gen_rtx_PARALLEL
2216				     (VOIDmode,
2217				      gen_rtvec (2, newpat,
2218						 gen_rtx_CLOBBER (VOIDmode,
2219								  i2dest))),
2220				     i3);
2221	    }
2222	}
2223
2224      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2225	{
2226	  m_split = PATTERN (m_split);
2227	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2228	  if (insn_code_number >= 0)
2229	    newpat = m_split;
2230	}
2231      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2232	       && (next_real_insn (i2) == i3
2233		   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2234	{
2235	  rtx i2set, i3set;
2236	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2237	  newi2pat = PATTERN (m_split);
2238
2239	  i3set = single_set (NEXT_INSN (m_split));
2240	  i2set = single_set (m_split);
2241
2242	  /* In case we changed the mode of I2DEST, replace it in the
2243	     pseudo-register table here.  We can't do it above in case this
2244	     code doesn't get executed and we do a split the other way.  */
2245
2246	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2247	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2248
2249	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2250
2251	  /* If I2 or I3 has multiple SETs, we won't know how to track
2252	     register status, so don't use these insns.  If I2's destination
2253	     is used between I2 and I3, we also can't use these insns.  */
2254
2255	  if (i2_code_number >= 0 && i2set && i3set
2256	      && (next_real_insn (i2) == i3
2257		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2258	    insn_code_number = recog_for_combine (&newi3pat, i3,
2259						  &new_i3_notes);
2260	  if (insn_code_number >= 0)
2261	    newpat = newi3pat;
2262
2263	  /* It is possible that both insns now set the destination of I3.
2264	     If so, we must show an extra use of it.  */
2265
2266	  if (insn_code_number >= 0)
2267	    {
2268	      rtx new_i3_dest = SET_DEST (i3set);
2269	      rtx new_i2_dest = SET_DEST (i2set);
2270
2271	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2272		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2273		     || GET_CODE (new_i3_dest) == SUBREG)
2274		new_i3_dest = XEXP (new_i3_dest, 0);
2275
2276	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2277		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2278		     || GET_CODE (new_i2_dest) == SUBREG)
2279		new_i2_dest = XEXP (new_i2_dest, 0);
2280
2281	      if (GET_CODE (new_i3_dest) == REG
2282		  && GET_CODE (new_i2_dest) == REG
2283		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2284		REG_N_SETS (REGNO (new_i2_dest))++;
2285	    }
2286	}
2287
2288      /* If we can split it and use I2DEST, go ahead and see if that
2289	 helps things be recognized.  Verify that none of the registers
2290	 are set between I2 and I3.  */
2291      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2292#ifdef HAVE_cc0
2293	  && GET_CODE (i2dest) == REG
2294#endif
2295	  /* We need I2DEST in the proper mode.  If it is a hard register
2296	     or the only use of a pseudo, we can change its mode.  */
2297	  && (GET_MODE (*split) == GET_MODE (i2dest)
2298	      || GET_MODE (*split) == VOIDmode
2299	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2300	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2301		  && ! REG_USERVAR_P (i2dest)))
2302	  && (next_real_insn (i2) == i3
2303	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2304	  /* We can't overwrite I2DEST if its value is still used by
2305	     NEWPAT.  */
2306	  && ! reg_referenced_p (i2dest, newpat))
2307	{
2308	  rtx newdest = i2dest;
2309	  enum rtx_code split_code = GET_CODE (*split);
2310	  enum machine_mode split_mode = GET_MODE (*split);
2311
2312	  /* Get NEWDEST as a register in the proper mode.  We have already
2313	     validated that we can do this.  */
2314	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2315	    {
2316	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2317
2318	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2319		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2320	    }
2321
2322	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2323	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2324	     appeared to be a memory address.  This is a kludge.  */
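	  /* For instance, (mult:SI (reg:SI 70) (const_int 8)) becomes
	     (ashift:SI (reg:SI 70) (const_int 3)).  */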
2325	  if (split_code == MULT
2326	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2327	      && INTVAL (XEXP (*split, 1)) > 0
2328	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2329	    {
2330	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2331					     XEXP (*split, 0), GEN_INT (i)));
2332	      /* Update split_code because we may not have a multiply
2333		 anymore.  */
2334	      split_code = GET_CODE (*split);
2335	    }
2336
2337#ifdef INSN_SCHEDULING
2338	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2339	     be written as a ZERO_EXTEND.  */
2340	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2341	    {
2342#ifdef LOAD_EXTEND_OP
2343	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2344		 what it really is.  */
2345	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2346		  == SIGN_EXTEND)
2347		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2348						    SUBREG_REG (*split)));
2349	      else
2350#endif
2351		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2352						    SUBREG_REG (*split)));
2353	    }
2354#endif
2355
2356	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2357	  SUBST (*split, newdest);
2358	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2359
2360	  /* If the split point was a MULT and we didn't have one before,
2361	     don't use one now.  */
2362	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2363	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2364	}
2365    }
2366
2367  /* Check for a case where we loaded from memory in a narrow mode and
2368     then sign extended it, but we need both registers.  In that case,
2369     we have a PARALLEL with both loads from the same memory location.
2370     We can split this into a load from memory followed by a register-register
2371     copy.  This saves at least one insn, more if register allocation can
2372     eliminate the copy.
2373
2374     We cannot do this if the destination of the first assignment is a
2375     condition code register or cc0.  We eliminate this case by making sure
2376     the SET_DEST and SET_SRC have the same mode.
2377
2378     We cannot do this if the destination of the second assignment is
2379     a register that we have already assumed is zero-extended.  Similarly
2380     for a SUBREG of such a register.  */
2381
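  /* Schematically (modes and register numbers here are arbitrary),
	(parallel [(set (reg:SI 70)
			(sign_extend:SI (mem:HI (reg:SI 69))))
		   (set (reg:HI 71) (mem:HI (reg:SI 69)))])
     becomes the extending load
	(set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 69))))
     followed, on a little-endian target, by the register-register copy
	(set (reg:HI 71) (subreg:HI (reg:SI 70) 0)).  */
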
2382  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2383	   && GET_CODE (newpat) == PARALLEL
2384	   && XVECLEN (newpat, 0) == 2
2385	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2386	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2387	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2388	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2389	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2390	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2391			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2392	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2393				   INSN_CUID (i2))
2394	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2395	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2396	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2397		 (GET_CODE (temp) == REG
2398		  && reg_nonzero_bits[REGNO (temp)] != 0
2399		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2400		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2401		  && (reg_nonzero_bits[REGNO (temp)]
2402		      != GET_MODE_MASK (word_mode))))
2403	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2404		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2405		     (GET_CODE (temp) == REG
2406		      && reg_nonzero_bits[REGNO (temp)] != 0
2407		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2408		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2409		      && (reg_nonzero_bits[REGNO (temp)]
2410			  != GET_MODE_MASK (word_mode)))))
2411	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2412					 SET_SRC (XVECEXP (newpat, 0, 1)))
2413	   && ! find_reg_note (i3, REG_UNUSED,
2414			       SET_DEST (XVECEXP (newpat, 0, 0))))
2415    {
2416      rtx ni2dest;
2417
2418      newi2pat = XVECEXP (newpat, 0, 0);
2419      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2420      newpat = XVECEXP (newpat, 0, 1);
2421      SUBST (SET_SRC (newpat),
2422	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2423      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2424
2425      if (i2_code_number >= 0)
2426	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2427
2428      if (insn_code_number >= 0)
2429	{
2430	  rtx insn;
2431	  rtx link;
2432
2433	  /* If we will be able to accept this, we have made a change to the
2434	     destination of I3.  This requires us to do a few adjustments.  */
2435	  PATTERN (i3) = newpat;
2436	  adjust_for_new_dest (i3);
2437
2438	  /* I3 now uses what used to be its destination and which is
2439	     now I2's destination.  That means we need a LOG_LINK from
2440	     I3 to I2.  But we used to have one, so we still will.
2441
2442	     However, some later insn might be using I2's dest and have
2443	     a LOG_LINK pointing at I3.  We must remove this link.
2444	     The simplest way to remove the link is to point it at I1,
2445	     which we know will be a NOTE.  */
2446
2447	  for (insn = NEXT_INSN (i3);
2448	       insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2449			|| insn != this_basic_block->next_bb->head);
2450	       insn = NEXT_INSN (insn))
2451	    {
2452	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2453		{
2454		  for (link = LOG_LINKS (insn); link;
2455		       link = XEXP (link, 1))
2456		    if (XEXP (link, 0) == i3)
2457		      XEXP (link, 0) = i1;
2458
2459		  break;
2460		}
2461	    }
2462	}
2463    }
2464
2465  /* Similarly, check for a case where we have a PARALLEL of two independent
2466     SETs but we started with three insns.  In this case, we can do the sets
2467     as two separate insns.  This case occurs when some SET allows two
2468     other insns to combine, but the destination of that SET is still live.  */
2469
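  /* For instance (register numbers here are arbitrary), a combination
     may leave
	(parallel [(set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
		   (set (reg:SI 71) (neg:SI (reg:SI 68)))])
     where the two SETs are independent; we then emit one as the new I2
     and the other as the new I3.  */
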
2470  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2471	   && GET_CODE (newpat) == PARALLEL
2472	   && XVECLEN (newpat, 0) == 2
2473	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2474	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2475	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2476	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2477	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2478	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2479	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2480				   INSN_CUID (i2))
2481	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2482	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2483	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2484	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2485				  XVECEXP (newpat, 0, 0))
2486	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2487				  XVECEXP (newpat, 0, 1))
2488	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2489		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2490    {
2491      /* Normally, it doesn't matter which of the two is done first,
2492	 but it does if one references cc0.  In that case, it has to
2493	 be first.  */
2494#ifdef HAVE_cc0
2495      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2496	{
2497	  newi2pat = XVECEXP (newpat, 0, 0);
2498	  newpat = XVECEXP (newpat, 0, 1);
2499	}
2500      else
2501#endif
2502	{
2503	  newi2pat = XVECEXP (newpat, 0, 1);
2504	  newpat = XVECEXP (newpat, 0, 0);
2505	}
2506
2507      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2508
2509      if (i2_code_number >= 0)
2510	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2511    }
2512
2513  /* If it still isn't recognized, fail and change things back the way they
2514     were.  */
2515  if ((insn_code_number < 0
2516       /* Is the result a reasonable ASM_OPERANDS?  */
2517       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2518    {
2519      undo_all ();
2520      return 0;
2521    }
2522
2523  /* If we had to change another insn, make sure it is valid also.  */
2524  if (undobuf.other_insn)
2525    {
2526      rtx other_pat = PATTERN (undobuf.other_insn);
2527      rtx new_other_notes;
2528      rtx note, next;
2529
2530      CLEAR_HARD_REG_SET (newpat_used_regs);
2531
2532      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2533					     &new_other_notes);
2534
2535      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2536	{
2537	  undo_all ();
2538	  return 0;
2539	}
2540
2541      PATTERN (undobuf.other_insn) = other_pat;
2542
2543      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2544	 are still valid.  Then add any non-duplicate notes added by
2545	 recog_for_combine.  */
2546      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2547	{
2548	  next = XEXP (note, 1);
2549
2550	  if (REG_NOTE_KIND (note) == REG_UNUSED
2551	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2552	    {
2553	      if (GET_CODE (XEXP (note, 0)) == REG)
2554		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2555
2556	      remove_note (undobuf.other_insn, note);
2557	    }
2558	}
2559
2560      for (note = new_other_notes; note; note = XEXP (note, 1))
2561	if (GET_CODE (XEXP (note, 0)) == REG)
2562	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2563
2564      distribute_notes (new_other_notes, undobuf.other_insn,
2565			undobuf.other_insn, NULL_RTX);
2566    }
2567#ifdef HAVE_cc0
  /* If the new I2 sets CC0 and I3 is its user, they must be adjacent;
     fail if some other insn has come between them.  */
2570  {
2571    rtx p = prev_nonnote_insn (i3);
2572    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2573	&& sets_cc0_p (newi2pat))
2574      {
2575	undo_all ();
2576	return 0;
2577      }
2578  }
2579#endif
2580
2581  /* We now know that we can do this combination.  Merge the insns and
2582     update the status of registers and LOG_LINKS.  */
2583
2584  {
2585    rtx i3notes, i2notes, i1notes = 0;
2586    rtx i3links, i2links, i1links = 0;
2587    rtx midnotes = 0;
2588    unsigned int regno;
2589
2590    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2591       clear them.  */
2592    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2593    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2594    if (i1)
2595      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2596
2597    /* Ensure that we do not have something that should not be shared but
2598       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */
2600
2601    reset_used_flags (i3notes);
2602    reset_used_flags (i2notes);
2603    reset_used_flags (i1notes);
2604    reset_used_flags (newpat);
2605    reset_used_flags (newi2pat);
2606    if (undobuf.other_insn)
2607      reset_used_flags (PATTERN (undobuf.other_insn));
2608
2609    i3notes = copy_rtx_if_shared (i3notes);
2610    i2notes = copy_rtx_if_shared (i2notes);
2611    i1notes = copy_rtx_if_shared (i1notes);
2612    newpat = copy_rtx_if_shared (newpat);
2613    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      PATTERN (undobuf.other_insn)
	= copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2616
2617    INSN_CODE (i3) = insn_code_number;
2618    PATTERN (i3) = newpat;
2619
2620    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2621      {
2622	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2623
2624	reset_used_flags (call_usage);
2625	call_usage = copy_rtx (call_usage);
2626
2627	if (substed_i2)
2628	  replace_rtx (call_usage, i2dest, i2src);
2629
2630	if (substed_i1)
2631	  replace_rtx (call_usage, i1dest, i1src);
2632
2633	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2634      }
2635
2636    if (undobuf.other_insn)
2637      INSN_CODE (undobuf.other_insn) = other_code_number;
2638
2639    /* We had one special case above where I2 had more than one set and
2640       we replaced a destination of one of those sets with the destination
2641       of I3.  In that case, we have to update LOG_LINKS of insns later
2642       in this basic block.  Note that this (expensive) case is rare.
2643
2644       Also, in this case, we must pretend that all REG_NOTEs for I2
2645       actually came from I3, so that REG_UNUSED notes from I2 will be
2646       properly handled.  */
2647
2648    if (i3_subst_into_i2)
2649      {
2650	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2651	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2652	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2653	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2654	      && ! find_reg_note (i2, REG_UNUSED,
2655				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2656	    for (temp = NEXT_INSN (i2);
2657		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
			  || this_basic_block->next_bb->head != temp);
2659		 temp = NEXT_INSN (temp))
2660	      if (temp != i3 && INSN_P (temp))
2661		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2662		  if (XEXP (link, 0) == i2)
2663		    XEXP (link, 0) = i3;
2664
2665	if (i3notes)
2666	  {
2667	    rtx link = i3notes;
2668	    while (XEXP (link, 1))
2669	      link = XEXP (link, 1);
2670	    XEXP (link, 1) = i2notes;
2671	  }
2672	else
2673	  i3notes = i2notes;
2674	i2notes = 0;
2675      }
2676
2677    LOG_LINKS (i3) = 0;
2678    REG_NOTES (i3) = 0;
2679    LOG_LINKS (i2) = 0;
2680    REG_NOTES (i2) = 0;
2681
2682    if (newi2pat)
2683      {
2684	INSN_CODE (i2) = i2_code_number;
2685	PATTERN (i2) = newi2pat;
2686      }
2687    else
2688      {
2689	PUT_CODE (i2, NOTE);
2690	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2691	NOTE_SOURCE_FILE (i2) = 0;
2692      }
2693
2694    if (i1)
2695      {
2696	LOG_LINKS (i1) = 0;
2697	REG_NOTES (i1) = 0;
2698	PUT_CODE (i1, NOTE);
2699	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2700	NOTE_SOURCE_FILE (i1) = 0;
2701      }
2702
2703    /* Get death notes for everything that is now used in either I3 or
2704       I2 and used to die in a previous insn.  If we built two new
2705       patterns, move from I1 to I2 then I2 to I3 so that we get the
2706       proper movement on registers that I2 modifies.  */
2707
2708    if (newi2pat)
2709      {
2710	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2711	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2712      }
2713    else
2714      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2715		   i3, &midnotes);
2716
2717    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2718    if (i3notes)
2719      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX);
2720    if (i2notes)
2721      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX);
2722    if (i1notes)
2723      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX);
2724    if (midnotes)
2725      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2726
2727    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2728       know these are REG_UNUSED and want them to go to the desired insn,
2729       so we always pass it as i3.  We have not counted the notes in
2730       reg_n_deaths yet, so we need to do so now.  */
2731
2732    if (newi2pat && new_i2_notes)
2733      {
2734	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2735	  if (GET_CODE (XEXP (temp, 0)) == REG)
2736	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2737
2738	distribute_notes (new_i2_notes, i2, i2, NULL_RTX);
2739      }
2740
2741    if (new_i3_notes)
2742      {
2743	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2744	  if (GET_CODE (XEXP (temp, 0)) == REG)
2745	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2746
2747	distribute_notes (new_i3_notes, i3, i3, NULL_RTX);
2748      }
2749
2750    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2751       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2752       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2753       in that case, it might delete I2.  Similarly for I2 and I1.
2754       Show an additional death due to the REG_DEAD note we make here.  If
2755       we discard it in distribute_notes, we will decrement it again.  */
2756
2757    if (i3dest_killed)
2758      {
2759	if (GET_CODE (i3dest_killed) == REG)
2760	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2761
2762	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2763	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2764					       NULL_RTX),
2765			    NULL_RTX, i2, NULL_RTX);
2766	else
2767	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2768					       NULL_RTX),
2769			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2770      }
2771
2772    if (i2dest_in_i2src)
2773      {
2774	if (GET_CODE (i2dest) == REG)
2775	  REG_N_DEATHS (REGNO (i2dest))++;
2776
2777	if (newi2pat && reg_set_p (i2dest, newi2pat))
2778	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2779			    NULL_RTX, i2, NULL_RTX);
2780	else
2781	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2782			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2783      }
2784
2785    if (i1dest_in_i1src)
2786      {
2787	if (GET_CODE (i1dest) == REG)
2788	  REG_N_DEATHS (REGNO (i1dest))++;
2789
2790	if (newi2pat && reg_set_p (i1dest, newi2pat))
2791	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2792			    NULL_RTX, i2, NULL_RTX);
2793	else
2794	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2795			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX);
2796      }
2797
2798    distribute_links (i3links);
2799    distribute_links (i2links);
2800    distribute_links (i1links);
2801
2802    if (GET_CODE (i2dest) == REG)
2803      {
2804	rtx link;
2805	rtx i2_insn = 0, i2_val = 0, set;
2806
2807	/* The insn that used to set this register doesn't exist, and
2808	   this life of the register may not exist either.  See if one of
2809	   I3's links points to an insn that sets I2DEST.  If it does,
2810	   that is now the last known value for I2DEST. If we don't update
2811	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
2813	   will be set correctly in combine_instructions.  */
2814
2815	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2816	  if ((set = single_set (XEXP (link, 0))) != 0
2817	      && rtx_equal_p (i2dest, SET_DEST (set)))
2818	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2819
2820	record_value_for_reg (i2dest, i2_insn, i2_val);
2821
2822	/* If the reg formerly set in I2 died only once and that was in I3,
2823	   zero its use count so it won't make `reload' do any work.  */
2824	if (! added_sets_2
2825	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2826	    && ! i2dest_in_i2src)
2827	  {
2828	    regno = REGNO (i2dest);
2829	    REG_N_SETS (regno)--;
2830	  }
2831      }
2832
2833    if (i1 && GET_CODE (i1dest) == REG)
2834      {
2835	rtx link;
2836	rtx i1_insn = 0, i1_val = 0, set;
2837
2838	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2839	  if ((set = single_set (XEXP (link, 0))) != 0
2840	      && rtx_equal_p (i1dest, SET_DEST (set)))
2841	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2842
2843	record_value_for_reg (i1dest, i1_insn, i1_val);
2844
2845	regno = REGNO (i1dest);
2846	if (! added_sets_1 && ! i1dest_in_i1src)
2847	  REG_N_SETS (regno)--;
2848      }
2849
    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  The order of the calls to
       set_nonzero_bits_and_sign_copies is important, because newi2pat can
       affect the nonzero_bits of newpat.  */
2853    if (newi2pat)
2854      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2855    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2856
2857    /* Set new_direct_jump_p if a new return or simple jump instruction
2858       has been created.
2859
2860       If I3 is now an unconditional jump, ensure that it has a
2861       BARRIER following it since it may have initially been a
2862       conditional jump.  It may also be the last nonnote insn.  */
2863
2864    if (returnjump_p (i3) || any_uncondjump_p (i3))
2865      {
2866	*new_direct_jump_p = 1;
2867
2868	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2869	    || GET_CODE (temp) != BARRIER)
2870	  emit_barrier_after (i3);
2871      }
2872
2873    if (undobuf.other_insn != NULL_RTX
2874	&& (returnjump_p (undobuf.other_insn)
2875	    || any_uncondjump_p (undobuf.other_insn)))
2876      {
2877	*new_direct_jump_p = 1;
2878
2879	if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
2880	    || GET_CODE (temp) != BARRIER)
2881	  emit_barrier_after (undobuf.other_insn);
2882      }
2883
    /* A NOOP jump does not need a barrier, but it does need cleaning up
       of the CFG.  */
2886    if (GET_CODE (newpat) == SET
2887	&& SET_SRC (newpat) == pc_rtx
2888	&& SET_DEST (newpat) == pc_rtx)
2889      *new_direct_jump_p = 1;
2890  }
2891
2892  combine_successes++;
2893  undo_commit ();
2894
2895  /* Clear this here, so that subsequent get_last_value calls are not
2896     affected.  */
2897  subst_prev_insn = NULL_RTX;
2898
2899  if (added_links_insn
2900      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2901      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2902    return added_links_insn;
2903  else
2904    return newi2pat ? i2 : i3;
2905}
2906
2907/* Undo all the modifications recorded in undobuf.  */
2908
2909static void
2910undo_all ()
2911{
2912  struct undo *undo, *next;
2913
2914  for (undo = undobuf.undos; undo; undo = next)
2915    {
2916      next = undo->next;
2917      if (undo->is_int)
2918	*undo->where.i = undo->old_contents.i;
2919      else
2920	*undo->where.r = undo->old_contents.r;
2921
2922      undo->next = undobuf.frees;
2923      undobuf.frees = undo;
2924    }
2925
2926  undobuf.undos = 0;
2927
2928  /* Clear this here, so that subsequent get_last_value calls are not
2929     affected.  */
2930  subst_prev_insn = NULL_RTX;
2931}
2932
2933/* We've committed to accepting the changes we made.  Move all
2934   of the undos to the free list.  */
2935
2936static void
2937undo_commit ()
2938{
2939  struct undo *undo, *next;
2940
2941  for (undo = undobuf.undos; undo; undo = next)
2942    {
2943      next = undo->next;
2944      undo->next = undobuf.frees;
2945      undobuf.frees = undo;
2946    }
2947  undobuf.undos = 0;
2948}
2949
2950
2951/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2952   where we have an arithmetic expression and return that point.  LOC will
2953   be inside INSN.
2954
2955   try_combine will call this function to see if an insn can be split into
2956   two insns.  */
2957
2958static rtx *
2959find_split_point (loc, insn)
2960     rtx *loc;
2961     rtx insn;
2962{
2963  rtx x = *loc;
2964  enum rtx_code code = GET_CODE (x);
2965  rtx *split;
2966  unsigned HOST_WIDE_INT len = 0;
2967  HOST_WIDE_INT pos = 0;
2968  int unsignedp = 0;
2969  rtx inner = NULL_RTX;
2970
2971  /* First special-case some codes.  */
2972  switch (code)
2973    {
2974    case SUBREG:
2975#ifdef INSN_SCHEDULING
2976      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2977	 point.  */
2978      if (GET_CODE (SUBREG_REG (x)) == MEM)
2979	return loc;
2980#endif
2981      return find_split_point (&SUBREG_REG (x), insn);
2982
2983    case MEM:
2984#ifdef HAVE_lo_sum
2985      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2986	 using LO_SUM and HIGH.  */
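      /* For example, (mem (symbol_ref FOO)) is rewritten as
	 (mem (lo_sum (high (symbol_ref FOO)) (symbol_ref FOO))), and the
	 inner HIGH becomes the split point.  */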
2987      if (GET_CODE (XEXP (x, 0)) == CONST
2988	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2989	{
2990	  SUBST (XEXP (x, 0),
2991		 gen_rtx_LO_SUM (Pmode,
2992				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2993				 XEXP (x, 0)));
2994	  return &XEXP (XEXP (x, 0), 0);
2995	}
2996#endif
2997
      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
3003      if (GET_CODE (XEXP (x, 0)) == PLUS
3004	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3005	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
3006	{
3007	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
3008	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
3009				 subst_insn);
3010
	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */
3015
3016	  if (seq
3017	      && NEXT_INSN (seq) != NULL_RTX
3018	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
3019	      && GET_CODE (seq) == INSN
3020	      && GET_CODE (PATTERN (seq)) == SET
3021	      && SET_DEST (PATTERN (seq)) == reg
3022	      && ! reg_mentioned_p (reg,
3023				    SET_SRC (PATTERN (seq)))
3024	      && GET_CODE (NEXT_INSN (seq)) == INSN
3025	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
3026	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
3027	      && memory_address_p (GET_MODE (x),
3028				   SET_SRC (PATTERN (NEXT_INSN (seq)))))
3029	    {
3030	      rtx src1 = SET_SRC (PATTERN (seq));
3031	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
3032
3033	      /* Replace the placeholder in SRC2 with SRC1.  If we can
3034		 find where in SRC2 it was placed, that can become our
3035		 split point and we can replace this address with SRC2.
3036		 Just try two obvious places.  */
3037
3038	      src2 = replace_rtx (src2, reg, src1);
3039	      split = 0;
3040	      if (XEXP (src2, 0) == src1)
3041		split = &XEXP (src2, 0);
3042	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
3043		       && XEXP (XEXP (src2, 0), 0) == src1)
3044		split = &XEXP (XEXP (src2, 0), 0);
3045
3046	      if (split)
3047		{
3048		  SUBST (XEXP (x, 0), src2);
3049		  return split;
3050		}
3051	    }
3052
3053	  /* If that didn't work, perhaps the first operand is complex and
3054	     needs to be computed separately, so make a split point there.
3055	     This will occur on machines that just support REG + CONST
3056	     and have a constant moved through some previous computation.  */
3057
3058	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
3059		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3060			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
3061			     == 'o')))
3062	    return &XEXP (XEXP (x, 0), 0);
3063	}
3064      break;
3065
3066    case SET:
3067#ifdef HAVE_cc0
3068      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3069	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3070	 we need to put the operand into a register.  So split at that
3071	 point.  */
3072
3073      if (SET_DEST (x) == cc0_rtx
3074	  && GET_CODE (SET_SRC (x)) != COMPARE
3075	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3076	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
3077	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
3078		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
3079	return &SET_SRC (x);
3080#endif
3081
3082      /* See if we can split SET_SRC as it stands.  */
3083      split = find_split_point (&SET_SRC (x), insn);
3084      if (split && split != &SET_SRC (x))
3085	return split;
3086
3087      /* See if we can split SET_DEST as it stands.  */
3088      split = find_split_point (&SET_DEST (x), insn);
3089      if (split && split != &SET_DEST (x))
3090	return split;
3091
3092      /* See if this is a bitfield assignment with everything constant.  If
3093	 so, this is an IOR of an AND, so split it into that.  */
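      /* For example, (set (zero_extract D 8 4) (const_int 3)) becomes
	 (set D (ior (and D (const_int -4081)) (const_int 48))): the field
	 is cleared with an AND mask and the shifted constant is IORed in
	 (bit positions here assume BITS_BIG_ENDIAN is false).  */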
3094      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3095	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3096	      <= HOST_BITS_PER_WIDE_INT)
3097	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3098	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3099	  && GET_CODE (SET_SRC (x)) == CONST_INT
3100	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3101	       + INTVAL (XEXP (SET_DEST (x), 2)))
3102	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3103	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3104	{
3105	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3106	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3107	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3108	  rtx dest = XEXP (SET_DEST (x), 0);
3109	  enum machine_mode mode = GET_MODE (dest);
3110	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3111
3112	  if (BITS_BIG_ENDIAN)
3113	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3114
3115	  if (src == mask)
3116	    SUBST (SET_SRC (x),
3117		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3118	  else
3119	    SUBST (SET_SRC (x),
3120		   gen_binary (IOR, mode,
3121			       gen_binary (AND, mode, dest,
3122					   gen_int_mode (~(mask << pos),
3123							 mode)),
3124			       GEN_INT (src << pos)));
3125
3126	  SUBST (SET_DEST (x), dest);
3127
3128	  split = find_split_point (&SET_SRC (x), insn);
3129	  if (split && split != &SET_SRC (x))
3130	    return split;
3131	}
3132
3133      /* Otherwise, see if this is an operation that we can split into two.
3134	 If so, try to split that.  */
3135      code = GET_CODE (SET_SRC (x));
3136
3137      switch (code)
3138	{
3139	case AND:
3140	  /* If we are AND'ing with a large constant that is only a single
3141	     bit and the result is only being used in a context where we
3142	     need to know if it is zero or nonzero, replace it with a bit
3143	     extraction.  This will avoid the large constant, which might
3144	     have taken more than one insn to make.  If the constant were
3145	     not a valid argument to the AND but took only one insn to make,
3146	     this is no worse, but if it took more than one insn, it will
3147	     be better.  */
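	  /* For example, if (set R (and X (const_int 4096))) is used only
	     by (ne R (const_int 0)), we rewrite the AND as a one-bit
	     ZERO_EXTRACT of bit 12 of X and then look for a split point
	     in the result.  */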
3148
3149	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3150	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3151	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3152	      && GET_CODE (SET_DEST (x)) == REG
3153	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3154	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3155	      && XEXP (*split, 0) == SET_DEST (x)
3156	      && XEXP (*split, 1) == const0_rtx)
3157	    {
3158	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3159						XEXP (SET_SRC (x), 0),
3160						pos, NULL_RTX, 1, 1, 0, 0);
3161	      if (extraction != 0)
3162		{
3163		  SUBST (SET_SRC (x), extraction);
3164		  return find_split_point (loc, insn);
3165		}
3166	    }
3167	  break;
3168
3169	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0), and only one bit
	     of X is known to be on, then this can be converted into a NEG
	     of a shift.  */
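	  /* For example, if only bit 3 of X can be nonzero,
	     (ne X (const_int 0)) becomes (neg (lshiftrt X (const_int 3))),
	     which is 0 or -1 as STORE_FLAG_VALUE requires.  */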
3172	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3173	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3174	      && 1 <= (pos = exact_log2
3175		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3176				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3177	    {
3178	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3179
3180	      SUBST (SET_SRC (x),
3181		     gen_rtx_NEG (mode,
3182				  gen_rtx_LSHIFTRT (mode,
3183						    XEXP (SET_SRC (x), 0),
3184						    GEN_INT (pos))));
3185
3186	      split = find_split_point (&SET_SRC (x), insn);
3187	      if (split && split != &SET_SRC (x))
3188		return split;
3189	    }
3190	  break;
3191
3192	case SIGN_EXTEND:
3193	  inner = XEXP (SET_SRC (x), 0);
3194
3195	  /* We can't optimize if either mode is a partial integer
3196	     mode as we don't know how many bits are significant
3197	     in those modes.  */
3198	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3199	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3200	    break;
3201
3202	  pos = 0;
3203	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3204	  unsignedp = 0;
3205	  break;
3206
3207	case SIGN_EXTRACT:
3208	case ZERO_EXTRACT:
3209	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3210	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3211	    {
3212	      inner = XEXP (SET_SRC (x), 0);
3213	      len = INTVAL (XEXP (SET_SRC (x), 1));
3214	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3215
3216	      if (BITS_BIG_ENDIAN)
3217		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3218	      unsignedp = (code == ZERO_EXTRACT);
3219	    }
3220	  break;
3221
3222	default:
3223	  break;
3224	}
3225
3226      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3227	{
3228	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3229
3230	  /* For unsigned, we have a choice of a shift followed by an
3231	     AND or two shifts.  Use two shifts for field sizes where the
3232	     constant might be too large.  We assume here that we can
3233	     always at least get 8-bit constants in an AND insn, which is
3234	     true for every current RISC.  */
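	  /* For example, an unsigned 8-bit field at bit position 4 becomes
	     (and (lshiftrt X 4) 255); a signed or wider field instead uses
	     (ashiftrt (ashift X (C - len - pos)) (C - len)), C being the
	     width of the mode.  */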
3235
3236	  if (unsignedp && len <= 8)
3237	    {
3238	      SUBST (SET_SRC (x),
3239		     gen_rtx_AND (mode,
3240				  gen_rtx_LSHIFTRT
3241				  (mode, gen_lowpart_for_combine (mode, inner),
3242				   GEN_INT (pos)),
3243				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3244
3245	      split = find_split_point (&SET_SRC (x), insn);
3246	      if (split && split != &SET_SRC (x))
3247		return split;
3248	    }
3249	  else
3250	    {
3251	      SUBST (SET_SRC (x),
3252		     gen_rtx_fmt_ee
3253		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3254		      gen_rtx_ASHIFT (mode,
3255				      gen_lowpart_for_combine (mode, inner),
3256				      GEN_INT (GET_MODE_BITSIZE (mode)
3257					       - len - pos)),
3258		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3259
3260	      split = find_split_point (&SET_SRC (x), insn);
3261	      if (split && split != &SET_SRC (x))
3262		return split;
3263	    }
3264	}
3265
3266      /* See if this is a simple operation with a constant as the second
3267	 operand.  It might be that this constant is out of range and hence
3268	 could be used as a split point.  */
3269      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3270	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3271	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3272	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3273	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3274	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3275		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3276		      == 'o'))))
3277	return &XEXP (SET_SRC (x), 1);
3278
3279      /* Finally, see if this is a simple operation with its first operand
3280	 not in a register.  The operation might require this operand in a
3281	 register, so return it as a split point.  We can always do this
3282	 because if the first operand were another operation, we would have
3283	 already found it as a split point.  */
3284      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3285	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3286	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3287	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3288	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3289	return &XEXP (SET_SRC (x), 0);
3290
3291      return 0;
3292
3293    case AND:
3294    case IOR:
3295      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3296	 it is better to write this as (not (ior A B)) so we can split it.
3297	 Similarly for IOR.  */
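      /* That is, (and (not A) (not B)) is rewritten as (not (ior A B)),
	 and (ior (not A) (not B)) as (not (and A B)), and we then look
	 for a split point in the result.  */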
3298      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3299	{
3300	  SUBST (*loc,
3301		 gen_rtx_NOT (GET_MODE (x),
3302			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3303					      GET_MODE (x),
3304					      XEXP (XEXP (x, 0), 0),
3305					      XEXP (XEXP (x, 1), 0))));
3306	  return find_split_point (loc, insn);
3307	}
3308
3309      /* Many RISC machines have a large set of logical insns.  If the
3310	 second operand is a NOT, put it first so we will try to split the
3311	 other operand first.  */
3312      if (GET_CODE (XEXP (x, 1)) == NOT)
3313	{
3314	  rtx tem = XEXP (x, 0);
3315	  SUBST (XEXP (x, 0), XEXP (x, 1));
3316	  SUBST (XEXP (x, 1), tem);
3317	}
3318      break;
3319
3320    default:
3321      break;
3322    }
3323
3324  /* Otherwise, select our actions depending on our rtx class.  */
3325  switch (GET_RTX_CLASS (code))
3326    {
3327    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3328    case '3':
3329      split = find_split_point (&XEXP (x, 2), insn);
3330      if (split)
3331	return split;
3332      /* ... fall through ...  */
3333    case '2':
3334    case 'c':
3335    case '<':
3336      split = find_split_point (&XEXP (x, 1), insn);
3337      if (split)
3338	return split;
3339      /* ... fall through ...  */
3340    case '1':
3341      /* Some machines have (and (shift ...) ...) insns.  If X is not
3342	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3343      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3344	return &XEXP (x, 0);
3345
3346      split = find_split_point (&XEXP (x, 0), insn);
3347      if (split)
3348	return split;
3349      return loc;
3350    }
3351
3352  /* Otherwise, we don't have a split point.  */
3353  return 0;
3354}
3355
3356/* Throughout X, replace FROM with TO, and return the result.
3357   The result is TO if X is FROM;
3358   otherwise the result is X, but its contents may have been modified.
3359   If they were modified, a record was made in undobuf so that
3360   undo_all will (among other things) return X to its original state.
3361
   If the number of changes necessary is too great to record for undoing,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that,
   the caller can tell whether the result is valid.
3367
3368   `n_occurrences' is incremented each time FROM is replaced.
3369
3370   IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3371
3372   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
3373   by copying if `n_occurrences' is nonzero.  */
3374
3375static rtx
3376subst (x, from, to, in_dest, unique_copy)
3377     rtx x, from, to;
3378     int in_dest;
3379     int unique_copy;
3380{
3381  enum rtx_code code = GET_CODE (x);
3382  enum machine_mode op0_mode = VOIDmode;
3383  const char *fmt;
3384  int len, i;
3385  rtx new;
3386
3387/* Two expressions are equal if they are identical copies of a shared
3388   RTX or if they are both registers with the same register number
3389   and mode.  */
3390
3391#define COMBINE_RTX_EQUAL_P(X,Y)			\
3392  ((X) == (Y)						\
3393   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3394       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3395
3396  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3397    {
3398      n_occurrences++;
3399      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3400    }
3401
3402  /* If X and FROM are the same register but different modes, they will
3403     not have been seen as equal above.  However, flow.c will make a
3404     LOG_LINKS entry for that case.  If we do nothing, we will try to
3405     rerecognize our original insn and, when it succeeds, we will
3406     delete the feeding insn, which is incorrect.
3407
3408     So force this insn not to match in this (rare) case.  */
3409  if (! in_dest && code == REG && GET_CODE (from) == REG
3410      && REGNO (x) == REGNO (from))
3411    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
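  /* The (clobber (const_int 0)) built here is combine's conventional
     marker for a substitution that must be rejected; callers and the
     code below check for it.  */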
3412
3413  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3414     of which may contain things that can be combined.  */
3415  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3416    return x;
3417
3418  /* It is possible to have a subexpression appear twice in the insn.
3419     Suppose that FROM is a register that appears within TO.
3420     Then, after that subexpression has been scanned once by `subst',
3421     the second time it is scanned, TO may be found.  If we were
3422     to scan TO here, we would find FROM within it and create a
3423     self-referent rtl structure which is completely wrong.  */
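  /* For example, with FROM = (reg 65) and TO = (plus (reg 65) (const_int 4)),
     rescanning an already-inserted copy of TO would substitute TO for its
     own (reg 65), producing circular rtl.  */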
3424  if (COMBINE_RTX_EQUAL_P (x, to))
3425    return to;
3426
3427  /* Parallel asm_operands need special attention because all of the
3428     inputs are shared across the arms.  Furthermore, unsharing the
3429     rtl results in recognition failures.  Failure to handle this case
3430     specially can result in circular rtl.
3431
3432     Solve this by doing a normal pass across the first entry of the
3433     parallel, and only processing the SET_DESTs of the subsequent
3434     entries.  Ug.  */
3435
3436  if (code == PARALLEL
3437      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3438      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3439    {
3440      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3441
3442      /* If this substitution failed, this whole thing fails.  */
3443      if (GET_CODE (new) == CLOBBER
3444	  && XEXP (new, 0) == const0_rtx)
3445	return new;
3446
3447      SUBST (XVECEXP (x, 0, 0), new);
3448
3449      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3450	{
3451	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3452
3453	  if (GET_CODE (dest) != REG
3454	      && GET_CODE (dest) != CC0
3455	      && GET_CODE (dest) != PC)
3456	    {
3457	      new = subst (dest, from, to, 0, unique_copy);
3458
3459	      /* If this substitution failed, this whole thing fails.  */
3460	      if (GET_CODE (new) == CLOBBER
3461		  && XEXP (new, 0) == const0_rtx)
3462		return new;
3463
3464	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3465	    }
3466	}
3467    }
3468  else
3469    {
3470      len = GET_RTX_LENGTH (code);
3471      fmt = GET_RTX_FORMAT (code);
3472
3473      /* We don't need to process a SET_DEST that is a register, CC0,
3474	 or PC, so set up to skip this common case.  All other cases
3475	 where we want to suppress replacing something inside a
3476	 SET_SRC are handled via the IN_DEST operand.  */
3477      if (code == SET
3478	  && (GET_CODE (SET_DEST (x)) == REG
3479	      || GET_CODE (SET_DEST (x)) == CC0
3480	      || GET_CODE (SET_DEST (x)) == PC))
3481	fmt = "ie";
3482
3483      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3484	 constant.  */
3485      if (fmt[0] == 'e')
3486	op0_mode = GET_MODE (XEXP (x, 0));
3487
3488      for (i = 0; i < len; i++)
3489	{
3490	  if (fmt[i] == 'E')
3491	    {
3492	      int j;
3493	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3494		{
3495		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3496		    {
3497		      new = (unique_copy && n_occurrences
3498			     ? copy_rtx (to) : to);
3499		      n_occurrences++;
3500		    }
3501		  else
3502		    {
3503		      new = subst (XVECEXP (x, i, j), from, to, 0,
3504				   unique_copy);
3505
3506		      /* If this substitution failed, this whole thing
3507			 fails.  */
3508		      if (GET_CODE (new) == CLOBBER
3509			  && XEXP (new, 0) == const0_rtx)
3510			return new;
3511		    }
3512
3513		  SUBST (XVECEXP (x, i, j), new);
3514		}
3515	    }
3516	  else if (fmt[i] == 'e')
3517	    {
3518	      /* If this is a register being set, ignore it.  */
3519	      new = XEXP (x, i);
3520	      if (in_dest
3521		  && (code == SUBREG || code == STRICT_LOW_PART
3522		      || code == ZERO_EXTRACT)
3523		  && i == 0
3524		  && GET_CODE (new) == REG)
3525		;
3526
3527	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3528		{
3529		  /* In general, don't install a subreg involving two
3530		     modes not tieable.  It can worsen register
3531		     allocation, and can even make invalid reload
3532		     insns, since the reg inside may need to be copied
3533		     from in the outside mode, and that may be invalid
3534		     if it is an fp reg copied in integer mode.
3535
3536		     We allow two exceptions to this: It is valid if
3537		     it is inside another SUBREG and the mode of that
3538		     SUBREG and the mode of the inside of TO is
3539		     tieable and it is valid if X is a SET that copies
3540		     FROM to CC0.  */
3541
3542		  if (GET_CODE (to) == SUBREG
3543		      && ! MODES_TIEABLE_P (GET_MODE (to),
3544					    GET_MODE (SUBREG_REG (to)))
3545		      && ! (code == SUBREG
3546			    && MODES_TIEABLE_P (GET_MODE (x),
3547						GET_MODE (SUBREG_REG (to))))
3548#ifdef HAVE_cc0
3549		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3550#endif
3551		      )
3552		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3553
3554#ifdef CANNOT_CHANGE_MODE_CLASS
3555		  if (code == SUBREG
3556		      && GET_CODE (to) == REG
3557		      && REGNO (to) < FIRST_PSEUDO_REGISTER
3558		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3559						   GET_MODE (to),
3560						   GET_MODE (x)))
3561		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3562#endif
3563
3564		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3565		  n_occurrences++;
3566		}
3567	      else
3568		/* If we are in a SET_DEST, suppress most cases unless we
3569		   have gone inside a MEM, in which case we want to
3570		   simplify the address.  We assume here that things that
3571		   are actually part of the destination have their inner
3572		   parts in the first expression.  This is true for SUBREG,
3573		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3574		   things aside from REG and MEM that should appear in a
3575		   SET_DEST.  */
3576		new = subst (XEXP (x, i), from, to,
3577			     (((in_dest
3578				&& (code == SUBREG || code == STRICT_LOW_PART
3579				    || code == ZERO_EXTRACT))
3580			       || code == SET)
3581			      && i == 0), unique_copy);
3582
3583	      /* If we found that we will have to reject this combination,
3584		 indicate that by returning the CLOBBER ourselves, rather than
3585		 an expression containing it.  This will speed things up as
3586		 well as prevent accidents where two CLOBBERs are considered
3587		 to be equal, thus producing an incorrect simplification.  */
3588
3589	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3590		return new;
3591
3592	      if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG)
3593		{
3594		  enum machine_mode mode = GET_MODE (x);
3595
3596		  x = simplify_subreg (GET_MODE (x), new,
3597				       GET_MODE (SUBREG_REG (x)),
3598				       SUBREG_BYTE (x));
3599		  if (! x)
3600		    x = gen_rtx_CLOBBER (mode, const0_rtx);
3601		}
3602	      else if (GET_CODE (new) == CONST_INT
3603		       && GET_CODE (x) == ZERO_EXTEND)
3604		{
3605		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3606						new, GET_MODE (XEXP (x, 0)));
3607		  if (! x)
3608		    abort ();
3609		}
3610	      else
3611		SUBST (XEXP (x, i), new);
3612	    }
3613	}
3614    }
3615
3616  /* Try to simplify X.  If the simplification changed the code, it is likely
3617     that further simplification will help, so loop, but limit the number
3618     of repetitions that will be performed.  */
3619
3620  for (i = 0; i < 4; i++)
3621    {
3622      /* If X is sufficiently simple, don't bother trying to do anything
3623	 with it.  */
3624      if (code != CONST_INT && code != REG && code != CLOBBER)
3625	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3626
3627      if (GET_CODE (x) == code)
3628	break;
3629
3630      code = GET_CODE (x);
3631
      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
3634      op0_mode = VOIDmode;
3635    }
3636
3637  return x;
3638}
3639
3640/* Simplify X, a piece of RTL.  We just operate on the expression at the
3641   outer level; call `subst' to simplify recursively.  Return the new
3642   expression.
3643
   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
   will be the last iteration even if an expression with a code different
   from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3647
3648static rtx
3649combine_simplify_rtx (x, op0_mode, last, in_dest)
3650     rtx x;
3651     enum machine_mode op0_mode;
3652     int last;
3653     int in_dest;
3654{
3655  enum rtx_code code = GET_CODE (x);
3656  enum machine_mode mode = GET_MODE (x);
3657  rtx temp;
3658  rtx reversed;
3659  int i;
3660
3661  /* If this is a commutative operation, put a constant last and a complex
3662     expression first.  We don't need to do this for comparisons here.  */
3663  if (GET_RTX_CLASS (code) == 'c'
3664      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3665    {
3666      temp = XEXP (x, 0);
3667      SUBST (XEXP (x, 0), XEXP (x, 1));
3668      SUBST (XEXP (x, 1), temp);
3669    }
3670
  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the sign
     extension and the addition.  Note that this is not the same as the
     original code, but overflow is undefined for signed values.  Also note
     that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
         (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend A) C2) C3),
     where C3 is the product of C2 and the sign-extended C1.

     We do this to simplify address expressions.  */
3688
3689  if ((code == PLUS || code == MINUS || code == MULT)
3690      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3691      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3692      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3693      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3694      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3695      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3696      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3697      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3698					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3699					    XEXP (XEXP (x, 0), 1))) != 0)
3700    {
3701      rtx new
3702	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3703				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3704				INTVAL (XEXP (XEXP (x, 0), 1)));
3705
3706      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3707				  INTVAL (XEXP (XEXP (x, 0), 1)));
3708
3709      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3710    }
3711
3712  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3713     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3714     things.  Check for cases where both arms are testing the same
3715     condition.
3716
3717     Don't do anything if all operands are very simple.  */
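  /* For example, (plus (if_then_else COND A B) C) can be rewritten as
     (if_then_else COND (plus A C) (plus B C)); if_then_else_cond extracts
     COND and the two arms for us.  */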
3718
3719  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3720	|| GET_RTX_CLASS (code) == '<')
3721       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3722	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3723		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3724		      == 'o')))
3725	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3726	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3727		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3728			 == 'o')))))
3729      || (GET_RTX_CLASS (code) == '1'
3730	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3731	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3732		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3733			 == 'o'))))))
3734    {
3735      rtx cond, true_rtx, false_rtx;
3736
3737      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3738      if (cond != 0
3739	  /* If everything is a comparison, what we have is highly unlikely
3740	     to be simpler, so don't use it.  */
3741	  && ! (GET_RTX_CLASS (code) == '<'
3742		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3743		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3744	{
3745	  rtx cop1 = const0_rtx;
3746	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3747
3748	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3749	    return x;
3750
3751	  /* Simplify the alternative arms; this may collapse the true and
3752	     false arms to store-flag values.  */
3753	  true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3754	  false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
3755
3756	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
3757	     is unlikely to be simpler.  */
3758	  if (general_operand (true_rtx, VOIDmode)
3759	      && general_operand (false_rtx, VOIDmode))
3760	    {
3761	      /* Restarting if we generate a store-flag expression will cause
3762		 us to loop.  Just drop through in this case.  */
3763
3764	      /* If the result values are STORE_FLAG_VALUE and zero, we can
3765		 just make the comparison operation.  */
3766	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3767		x = gen_binary (cond_code, mode, cond, cop1);
3768	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3769		       && reverse_condition (cond_code) != UNKNOWN)
3770		x = gen_binary (reverse_condition (cond_code),
3771				mode, cond, cop1);
3772
3773	      /* Likewise, we can make the negate of a comparison operation
3774		 if the result values are - STORE_FLAG_VALUE and zero.  */
3775	      else if (GET_CODE (true_rtx) == CONST_INT
3776		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3777		       && false_rtx == const0_rtx)
3778		x = simplify_gen_unary (NEG, mode,
3779					gen_binary (cond_code, mode, cond,
3780						    cop1),
3781					mode);
3782	      else if (GET_CODE (false_rtx) == CONST_INT
3783		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3784		       && true_rtx == const0_rtx)
3785		x = simplify_gen_unary (NEG, mode,
3786					gen_binary (reverse_condition
3787						    (cond_code),
3788						    mode, cond, cop1),
3789					mode);
3790	      else
3791		return gen_rtx_IF_THEN_ELSE (mode,
3792					     gen_binary (cond_code, VOIDmode,
3793							 cond, cop1),
3794					     true_rtx, false_rtx);
3795
3796	      code = GET_CODE (x);
3797	      op0_mode = VOIDmode;
3798	    }
3799	}
3800    }
3801
3802  /* Try to fold this expression in case we have constants that weren't
3803     present before.  */
3804  temp = 0;
3805  switch (GET_RTX_CLASS (code))
3806    {
3807    case '1':
3808      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3809      break;
3810    case '<':
3811      {
3812	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3813	if (cmp_mode == VOIDmode)
3814	  {
3815	    cmp_mode = GET_MODE (XEXP (x, 1));
3816	    if (cmp_mode == VOIDmode)
3817	      cmp_mode = op0_mode;
3818	  }
3819	temp = simplify_relational_operation (code, cmp_mode,
3820					      XEXP (x, 0), XEXP (x, 1));
3821      }
3822#ifdef FLOAT_STORE_FLAG_VALUE
3823      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3824	{
3825	  if (temp == const0_rtx)
3826	    temp = CONST0_RTX (mode);
3827	  else
3828	    temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode),
3829						 mode);
3830	}
3831#endif
3832      break;
3833    case 'c':
3834    case '2':
3835      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3836      break;
3837    case 'b':
3838    case '3':
3839      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3840					 XEXP (x, 1), XEXP (x, 2));
3841      break;
3842    }
3843
3844  if (temp)
3845    {
3846      x = temp;
3847      code = GET_CODE (temp);
3848      op0_mode = VOIDmode;
3849      mode = GET_MODE (temp);
3850    }
3851
3852  /* First see if we can apply the inverse distributive law.  */
3853  if (code == PLUS || code == MINUS
3854      || code == AND || code == IOR || code == XOR)
3855    {
3856      x = apply_distributive_law (x);
3857      code = GET_CODE (x);
3858      op0_mode = VOIDmode;
3859    }
3860
3861  /* If CODE is an associative operation not otherwise handled, see if we
3862     can associate some operands.  This can win if they are constants or
3863     if they are logically related (i.e. (a & b) & a).  */
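  /* For example, (plus (plus A (const_int 4)) (const_int 3)) reassociates
     to (plus A (const_int 7)).  */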
3864  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3865       || code == AND || code == IOR || code == XOR
3866       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3867      && ((INTEGRAL_MODE_P (mode) && code != DIV)
3868	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3869    {
3870      if (GET_CODE (XEXP (x, 0)) == code)
3871	{
3872	  rtx other = XEXP (XEXP (x, 0), 0);
3873	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3874	  rtx inner_op1 = XEXP (x, 1);
3875	  rtx inner;
3876
3877#ifndef FRAME_GROWS_DOWNWARD
3878	  if (flag_propolice_protection
3879	      && code == PLUS
3880	      && other == frame_pointer_rtx
3881	      && GET_CODE (inner_op0) == CONST_INT
3882	      && GET_CODE (inner_op1) == CONST_INT
3883	      && INTVAL (inner_op0) > 0
3884	      && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
3885	    return x;
3886#endif
3887	  /* Make sure we pass the constant operand if any as the second
3888	     one if this is a commutative operation.  */
3889	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3890	    {
3891	      rtx tem = inner_op0;
3892	      inner_op0 = inner_op1;
3893	      inner_op1 = tem;
3894	    }
3895	  inner = simplify_binary_operation (code == MINUS ? PLUS
3896					     : code == DIV ? MULT
3897					     : code,
3898					     mode, inner_op0, inner_op1);
3899
3900	  /* For commutative operations, try the other pair if that one
3901	     didn't simplify.  */
3902	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3903	    {
3904	      other = XEXP (XEXP (x, 0), 1);
3905	      inner = simplify_binary_operation (code, mode,
3906						 XEXP (XEXP (x, 0), 0),
3907						 XEXP (x, 1));
3908	    }
3909
3910	  if (inner)
3911	    return gen_binary (code, mode, other, inner);
3912	}
3913    }
3914
3915  /* A little bit of algebraic simplification here.  */
3916  switch (code)
3917    {
3918    case MEM:
3919      /* Ensure that our address has any ASHIFTs converted to MULT in case
3920	 address-recognizing predicates are called later.  */
3921      temp = make_compound_operation (XEXP (x, 0), MEM);
3922      SUBST (XEXP (x, 0), temp);
3923      break;
3924
3925    case SUBREG:
3926      if (op0_mode == VOIDmode)
3927	op0_mode = GET_MODE (SUBREG_REG (x));
3928
3929      /* simplify_subreg can't use gen_lowpart_for_combine.  */
3930      if (CONSTANT_P (SUBREG_REG (x))
3931	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
3932	     /* Don't call gen_lowpart_for_combine if the inner mode
3933		is VOIDmode and we cannot simplify it, as SUBREG without
3934		inner mode is invalid.  */
3935	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
3936	      || gen_lowpart_common (mode, SUBREG_REG (x))))
3937	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3938
3939      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3940        break;
3941      {
3942	rtx temp;
3943	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3944				SUBREG_BYTE (x));
3945	if (temp)
3946	  return temp;
3947      }
3948
3949      /* Don't change the mode of the MEM if that would change the meaning
3950	 of the address.  */
3951      if (GET_CODE (SUBREG_REG (x)) == MEM
3952	  && (MEM_VOLATILE_P (SUBREG_REG (x))
3953	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3954	return gen_rtx_CLOBBER (mode, const0_rtx);
3955
3956      /* Note that we cannot do any narrowing for non-constants since
3957	 we might have been counting on using the fact that some bits were
3958	 zero.  We now do this in the SET.  */
3959
3960      break;
3961
3962    case NOT:
3963      /* (not (plus X -1)) can become (neg X).  */
3964      if (GET_CODE (XEXP (x, 0)) == PLUS
3965	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3966	return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
3967
3968      /* Similarly, (not (neg X)) is (plus X -1).  */
3969      if (GET_CODE (XEXP (x, 0)) == NEG)
3970	return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3971
3972      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
3973      if (GET_CODE (XEXP (x, 0)) == XOR
3974	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3975	  && (temp = simplify_unary_operation (NOT, mode,
3976					       XEXP (XEXP (x, 0), 1),
3977					       mode)) != 0)
3978	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3979
3980      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3981	 other than 1, but that is not valid.  We could do a similar
3982	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3983	 but this doesn't seem common enough to bother with.  */
3984      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3985	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3986	return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3987							 const1_rtx, mode),
3988			       XEXP (XEXP (x, 0), 1));
3989
3990      if (GET_CODE (XEXP (x, 0)) == SUBREG
3991	  && subreg_lowpart_p (XEXP (x, 0))
3992	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3993	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3994	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3995	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3996	{
3997	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3998
3999	  x = gen_rtx_ROTATE (inner_mode,
4000			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
4001						  inner_mode),
4002			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
4003	  return gen_lowpart_for_combine (mode, x);
4004	}
4005
4006      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
4007	 reversing the comparison code if valid.  */
4008      if (STORE_FLAG_VALUE == -1
4009	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4010	  && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
4011					      XEXP (XEXP (x, 0), 1))))
4012	return reversed;
4013
4014      /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
4015	 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
4016	 perform the above simplification.  */
4017
4018      if (STORE_FLAG_VALUE == -1
4019	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
4020	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4021	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4022	return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4023
4024      /* Apply De Morgan's laws to reduce number of patterns for machines
4025	 with negating logical insns (and-not, nand, etc.).  If result has
4026	 only one NOT, put it first, since that is how the patterns are
4027	 coded.  */
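      /* For example, (not (ior A B)) becomes (and (not A) (not B)), and
	 (not (and A B)) becomes (ior (not A) (not B)).  */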
4028
4029      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
4030	{
4031	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
4032	  enum machine_mode op_mode;
4033
4034	  op_mode = GET_MODE (in1);
4035	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
4036
4037	  op_mode = GET_MODE (in2);
4038	  if (op_mode == VOIDmode)
4039	    op_mode = mode;
4040	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
4041
4042	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
4043	    {
4044	      rtx tem = in2;
4045	      in2 = in1; in1 = tem;
4046	    }
4047
4048	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
4049				 mode, in1, in2);
4050	}
4051      break;
4052
4053    case NEG:
4054      /* (neg (plus X 1)) can become (not X).  */
4055      if (GET_CODE (XEXP (x, 0)) == PLUS
4056	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
4057	return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
4058
4059      /* Similarly, (neg (not X)) is (plus X 1).  */
4060      if (GET_CODE (XEXP (x, 0)) == NOT)
4061	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
4062
4063      /* (neg (minus X Y)) can become (minus Y X).  This transformation
4064	 isn't safe for modes with signed zeros, since if X and Y are
4065	 both +0, (minus Y X) is the same as (minus X Y).  If the rounding
4066	 mode is towards +infinity (or -infinity) then the two expressions
4067	 will be rounded differently.  */
4068      if (GET_CODE (XEXP (x, 0)) == MINUS
4069	  && !HONOR_SIGNED_ZEROS (mode)
4070	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
4071	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
4072			   XEXP (XEXP (x, 0), 0));
4073
4074      /* (neg (plus A B)) is canonicalized to (minus (neg A) B).  */
4075      if (GET_CODE (XEXP (x, 0)) == PLUS
4076	  && !HONOR_SIGNED_ZEROS (mode)
4077	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
4078	{
4079	  temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode);
4080	  temp = combine_simplify_rtx (temp, mode, last, in_dest);
4081	  return gen_binary (MINUS, mode, temp, XEXP (XEXP (x, 0), 1));
4082	}
4083
4084      /* (neg (mult A B)) becomes (mult (neg A) B).
4085         This works even for floating-point values.  */
4086      if (GET_CODE (XEXP (x, 0)) == MULT)
4087	{
4088	  temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode);
4089	  return gen_binary (MULT, mode, temp, XEXP (XEXP (x, 0), 1));
4090	}
4091
4092      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
4093      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
4094	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4095	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
4096
4097      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
4098	 if we can then eliminate the NEG (e.g.,
4099	 if the operand is a constant).  */
4100
4101      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4102	{
4103	  temp = simplify_unary_operation (NEG, mode,
4104					   XEXP (XEXP (x, 0), 0), mode);
4105	  if (temp)
4106	    return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
4107	}
4108
4109      temp = expand_compound_operation (XEXP (x, 0));
4110
4111      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4112	 replaced by (lshiftrt X C).  This will convert
4113	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
4114
4115      if (GET_CODE (temp) == ASHIFTRT
4116	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
4117	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4118	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4119				     INTVAL (XEXP (temp, 1)));
4120
4121      /* If X has only a single bit that might be nonzero, say, bit I, convert
4122	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4123	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
4124	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
4125	 or a SUBREG of one since we'd be making the expression more
4126	 complex if it was just a register.  */
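      /* For example, if only bit 2 of TEMP can be nonzero and MODE is
	 SImode, (neg TEMP) becomes (ashiftrt (ashift TEMP 29) 29),
	 which replicates that bit through all the higher positions.  */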
4127
4128      if (GET_CODE (temp) != REG
4129	  && ! (GET_CODE (temp) == SUBREG
4130		&& GET_CODE (SUBREG_REG (temp)) == REG)
4131	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4132	{
4133	  rtx temp1 = simplify_shift_const
4134	    (NULL_RTX, ASHIFTRT, mode,
4135	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4136				   GET_MODE_BITSIZE (mode) - 1 - i),
4137	     GET_MODE_BITSIZE (mode) - 1 - i);
4138
4139	  /* If all we did was surround TEMP with the two shifts, we
4140	     haven't improved anything, so don't use it.  Otherwise,
4141	     we are better off with TEMP1.  */
4142	  if (GET_CODE (temp1) != ASHIFTRT
4143	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4144	      || XEXP (XEXP (temp1, 0), 0) != temp)
4145	    return temp1;
4146	}
4147      break;
4148
4149    case TRUNCATE:
4150      /* We can't handle truncation to a partial integer mode here
4151	 because we don't know the real bitsize of the partial
4152	 integer mode.  */
4153      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4154	break;
4155
4156      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4157	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4158				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4159	SUBST (XEXP (x, 0),
4160	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4161			      GET_MODE_MASK (mode), NULL_RTX, 0));
4162
4163      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
4164      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4165	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4166	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4167	return XEXP (XEXP (x, 0), 0);
4168
4169      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4170	 (OP:SI foo:SI) if OP is NEG or ABS.  */
4171      if ((GET_CODE (XEXP (x, 0)) == ABS
4172	   || GET_CODE (XEXP (x, 0)) == NEG)
4173	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4174	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4175	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4176	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4177				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4178
4179      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4180	 (truncate:SI x).  */
4181      if (GET_CODE (XEXP (x, 0)) == SUBREG
4182	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4183	  && subreg_lowpart_p (XEXP (x, 0)))
4184	return SUBREG_REG (XEXP (x, 0));
4185
4186      /* If we know that the value is already truncated, we can
4187         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4188         is nonzero for the corresponding modes.  But don't do this
4189         for an (LSHIFTRT (MULT ...)) since this will cause problems
4190         with the umulXi3_highpart patterns.  */
4191      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4192				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4193	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4194	     >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4195	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4196		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4197	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4198
4199      /* A truncate of a comparison can be replaced with a subreg if
4200         STORE_FLAG_VALUE permits.  This is like the previous test,
4201         but it works even if the comparison is done in a mode larger
4202         than HOST_BITS_PER_WIDE_INT.  */
4203      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4204	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4205	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4206	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4207
4208      /* Similarly, a truncate of a register whose value is a
4209         comparison can be replaced with a subreg if STORE_FLAG_VALUE
4210         permits.  */
4211      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4212	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4213	  && (temp = get_last_value (XEXP (x, 0)))
4214	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4215	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4216
4217      break;
4218
4219    case FLOAT_TRUNCATE:
4220      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
4221      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4222	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4223	return XEXP (XEXP (x, 0), 0);
4224
      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
	 (OP:SF foo:SF) if OP is NEG or ABS.  */
4227      if ((GET_CODE (XEXP (x, 0)) == ABS
4228	   || GET_CODE (XEXP (x, 0)) == NEG)
4229	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4230	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4231	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4232				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4233
4234      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4235	 is (float_truncate:SF x).  */
4236      if (GET_CODE (XEXP (x, 0)) == SUBREG
4237	  && subreg_lowpart_p (XEXP (x, 0))
4238	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4239	return SUBREG_REG (XEXP (x, 0));
4240      break;
4241
4242#ifdef HAVE_cc0
4243    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO.  (If we were not
	 using cc0 we would want to leave it as a COMPARE so we could
	 distinguish it from a register-register copy; that is why this
	 case is guarded by HAVE_cc0.)  */
4247      if (XEXP (x, 1) == const0_rtx)
4248	return XEXP (x, 0);
4249
4250      /* x - 0 is the same as x unless x's mode has signed zeros and
4251	 allows rounding towards -infinity.  Under those conditions,
4252	 0 - 0 is -0.  */
4253      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4254	    && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4255	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4256	return XEXP (x, 0);
4257      break;
4258#endif
4259
4260    case CONST:
4261      /* (const (const X)) can become (const X).  Do it this way rather than
4262	 returning the inner CONST since CONST can be shared with a
4263	 REG_EQUAL note.  */
4264      if (GET_CODE (XEXP (x, 0)) == CONST)
4265	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4266      break;
4267
4268#ifdef HAVE_lo_sum
4269    case LO_SUM:
4270      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
4271	 can add in an offset.  find_split_point will split this address up
4272	 again if it doesn't match.  */
4273      if (GET_CODE (XEXP (x, 0)) == HIGH
4274	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4275	return XEXP (x, 1);
4276      break;
4277#endif
4278
4279    case PLUS:
      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
4282      if (GET_CODE (XEXP (x, 0)) == MULT
4283	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4284	{
4285	  rtx in1, in2;
4286
4287	  in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4288	  in2 = XEXP (XEXP (x, 0), 1);
4289	  return gen_binary (MINUS, mode, XEXP (x, 1),
4290			     gen_binary (MULT, mode, in1, in2));
4291	}
4292
      /* If we have (plus (plus A const) B), associate it so that the
	 constant is outermost.  That's because that's the way indexed
	 addresses are supposed to appear.  This code used to check many
	 more cases, but they are now checked elsewhere.  */
4297      if (GET_CODE (XEXP (x, 0)) == PLUS
4298	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4299#ifndef FRAME_GROWS_DOWNWARD
4300	if (! (flag_propolice_protection
4301	       && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
4302	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4303#endif
4304	return gen_binary (PLUS, mode,
4305			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4306				       XEXP (x, 1)),
4307			   XEXP (XEXP (x, 0), 1));
4308
4309      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4310	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4311	 bit-field and can be replaced by either a sign_extend or a
4312	 sign_extract.  The `and' may be a zero_extend and the two
4313	 <c>, -<c> constants may be reversed.  */
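      /* For example, (plus (xor (and X (const_int 255)) (const_int 128))
	 (const_int -128)) sign-extends the low byte of X and is rewritten
	 here as (ashiftrt (ashift X (C - 8)) (C - 8)), C being the width
	 of the mode.  */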
4314      if (GET_CODE (XEXP (x, 0)) == XOR
4315	  && GET_CODE (XEXP (x, 1)) == CONST_INT
4316	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4317	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4318	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4319	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4320	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4321	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4322	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4323	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4324		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4325	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4326		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4327		      == (unsigned int) i + 1))))
4328	return simplify_shift_const
4329	  (NULL_RTX, ASHIFTRT, mode,
4330	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4331				 XEXP (XEXP (XEXP (x, 0), 0), 0),
4332				 GET_MODE_BITSIZE (mode) - (i + 1)),
4333	   GET_MODE_BITSIZE (mode) - (i + 1));
4334
4335      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4336	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4337	 is 1.  This produces better code than the alternative immediately
4338	 below.  */
4339      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4340	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4341	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4342	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
4343					      XEXP (XEXP (x, 0), 0),
4344					      XEXP (XEXP (x, 0), 1))))
4345	return
4346	  simplify_gen_unary (NEG, mode, reversed, mode);
4347
4348      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4349	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4350	 the bitsize of the mode - 1.  This allows simplification of
4351	 "a = (b & 8) == 0;"  */
4352      if (XEXP (x, 1) == constm1_rtx
4353	  && GET_CODE (XEXP (x, 0)) != REG
	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4355		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4356	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4357	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4358	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4359				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4360				 GET_MODE_BITSIZE (mode) - 1),
4361	   GET_MODE_BITSIZE (mode) - 1);
4362
4363      /* If we are adding two things that have no bits in common, convert
4364	 the addition into an IOR.  This will often be further simplified,
4365	 for example in cases like ((a & 1) + (a & 2)), which can
4366	 become a & 3.  */
4367
4368      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4369	  && (nonzero_bits (XEXP (x, 0), mode)
4370	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4371	{
4372	  /* Try to simplify the expression further.  */
4373	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4374	  temp = combine_simplify_rtx (tor, mode, last, in_dest);
4375
4376	  /* If we could, great.  If not, do not go ahead with the IOR
4377	     replacement, since PLUS appears in many special purpose
4378	     address arithmetic instructions.  */
4379	  if (GET_CODE (temp) != CLOBBER && temp != tor)
4380	    return temp;
4381	}
4382      break;
4383
4384    case MINUS:
4385      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4386	 by reversing the comparison code if valid.  */
4387      if (STORE_FLAG_VALUE == 1
4388	  && XEXP (x, 0) == const1_rtx
4389	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4390	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
4391					      XEXP (XEXP (x, 1), 0),
4392					      XEXP (XEXP (x, 1), 1))))
4393	return reversed;
4394
4395      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4396	 (and <foo> (const_int pow2-1))  */
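      /* For example, (minus A (and A (const_int -8))) keeps only the low
	 three bits of A and becomes (and A (const_int 7)).  */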
4397      if (GET_CODE (XEXP (x, 1)) == AND
4398	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4399	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4400	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4401	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4402				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4403
      /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
4406      if (GET_CODE (XEXP (x, 1)) == MULT
4407	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4408	{
4409	  rtx in1, in2;
4410
4411	  in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4412	  in2 = XEXP (XEXP (x, 1), 1);
4413	  return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2),
4414			     XEXP (x, 0));
4415	}
4416
4417       /* Canonicalize (minus (neg A) (mult B C)) to
4418	  (minus (mult (neg B) C) A). */
4419      if (GET_CODE (XEXP (x, 1)) == MULT
4420	  && GET_CODE (XEXP (x, 0)) == NEG)
4421	{
4422	  rtx in1, in2;
4423
4424	  in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4425	  in2 = XEXP (XEXP (x, 1), 1);
4426	  return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2),
4427			     XEXP (XEXP (x, 0), 0));
4428	}
4429
4430      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4431	 integers.  */
4432      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
4433	  && (! (flag_propolice_protection
4434		 && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
4435		 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
4436	return gen_binary (MINUS, mode,
4437			   gen_binary (MINUS, mode, XEXP (x, 0),
4438				       XEXP (XEXP (x, 1), 0)),
4439			   XEXP (XEXP (x, 1), 1));
4440      break;
4441
4442    case MULT:
4443      /* If we have (mult (plus A B) C), apply the distributive law and then
4444	 the inverse distributive law to see if things simplify.  This
4445	 occurs mostly in addresses, often when unrolling loops.  */
4446
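      /* E.g. (mult (plus A (const_int 4)) (const_int 8)) is rewritten
	 as (plus (mult A (const_int 8)) (const_int 32)), a form that
	 address arithmetic can often use directly.  */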
4447      if (GET_CODE (XEXP (x, 0)) == PLUS)
4448	{
4449	  x = apply_distributive_law
4450	    (gen_binary (PLUS, mode,
4451			 gen_binary (MULT, mode,
4452				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4453			 gen_binary (MULT, mode,
4454				     XEXP (XEXP (x, 0), 1),
4455				     copy_rtx (XEXP (x, 1)))));
4456
4457	  if (GET_CODE (x) != MULT)
4458	    return x;
4459	}
4460      /* Try to simplify a*(b/c) as (a*b)/c.  */
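      /* Reassociating the multiply and divide can change rounding and
	 overflow behavior in floating point, which is why this
	 transformation is guarded by flag_unsafe_math_optimizations.  */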
4461      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4462	  && GET_CODE (XEXP (x, 0)) == DIV)
4463	{
4464	  rtx tem = simplify_binary_operation (MULT, mode,
4465					       XEXP (XEXP (x, 0), 0),
4466					       XEXP (x, 1));
4467	  if (tem)
4468	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4469	}
4470      break;
4471
4472    case UDIV:
4473      /* If this is a divide by a power of two, treat it as a shift if
4474	 its first operand is a shift.  */
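      /* E.g. (udiv (lshiftrt X (const_int 2)) (const_int 4)) becomes a
	 right shift by 2 of the inner shift, which the shift simplifier
	 can then fold into (lshiftrt X (const_int 4)).  */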
4475      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4476	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4477	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4478	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4479	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4480	      || GET_CODE (XEXP (x, 0)) == ROTATE
4481	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4482	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4483      break;
4484
4485    case EQ:  case NE:
4486    case GT:  case GTU:  case GE:  case GEU:
4487    case LT:  case LTU:  case LE:  case LEU:
4488    case UNEQ:  case LTGT:
4489    case UNGT:  case UNGE:
4490    case UNLT:  case UNLE:
4491    case UNORDERED: case ORDERED:
4492      /* If the first operand is a condition code, we can't do anything
4493	 with it.  */
4494      if (GET_CODE (XEXP (x, 0)) == COMPARE
4495	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4496#ifdef HAVE_cc0
4497	      && XEXP (x, 0) != cc0_rtx
4498#endif
4499	      ))
4500	{
4501	  rtx op0 = XEXP (x, 0);
4502	  rtx op1 = XEXP (x, 1);
4503	  enum rtx_code new_code;
4504
4505	  if (GET_CODE (op0) == COMPARE)
4506	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4507
4508	  /* Simplify our comparison, if possible.  */
4509	  new_code = simplify_comparison (code, &op0, &op1);
4510
4511	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4512	     if only the low-order bit is possibly nonzero in X (such as when
4513	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4514	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4515	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4516	     (plus X 1).
4517
4518	     Remove any ZERO_EXTRACT we made when thinking this was a
4519	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4520	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4521	     the call to make_compound_operation in the SET case.  */
4522
4523	  if (STORE_FLAG_VALUE == 1
4524	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4525	      && op1 == const0_rtx
4526	      && mode == GET_MODE (op0)
4527	      && nonzero_bits (op0, mode) == 1)
4528	    return gen_lowpart_for_combine (mode,
4529					    expand_compound_operation (op0));
4530
4531	  else if (STORE_FLAG_VALUE == 1
4532		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4533		   && op1 == const0_rtx
4534		   && mode == GET_MODE (op0)
4535		   && (num_sign_bit_copies (op0, mode)
4536		       == GET_MODE_BITSIZE (mode)))
4537	    {
4538	      op0 = expand_compound_operation (op0);
4539	      return simplify_gen_unary (NEG, mode,
4540					 gen_lowpart_for_combine (mode, op0),
4541					 mode);
4542	    }
4543
4544	  else if (STORE_FLAG_VALUE == 1
4545		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4546		   && op1 == const0_rtx
4547		   && mode == GET_MODE (op0)
4548		   && nonzero_bits (op0, mode) == 1)
4549	    {
4550	      op0 = expand_compound_operation (op0);
4551	      return gen_binary (XOR, mode,
4552				 gen_lowpart_for_combine (mode, op0),
4553				 const1_rtx);
4554	    }
4555
4556	  else if (STORE_FLAG_VALUE == 1
4557		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4558		   && op1 == const0_rtx
4559		   && mode == GET_MODE (op0)
4560		   && (num_sign_bit_copies (op0, mode)
4561		       == GET_MODE_BITSIZE (mode)))
4562	    {
4563	      op0 = expand_compound_operation (op0);
4564	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4565	    }
4566
4567	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4568	     those above.  */
4569	  if (STORE_FLAG_VALUE == -1
4570	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4571	      && op1 == const0_rtx
4572	      && (num_sign_bit_copies (op0, mode)
4573		  == GET_MODE_BITSIZE (mode)))
4574	    return gen_lowpart_for_combine (mode,
4575					    expand_compound_operation (op0));
4576
4577	  else if (STORE_FLAG_VALUE == -1
4578		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4579		   && op1 == const0_rtx
4580		   && mode == GET_MODE (op0)
4581		   && nonzero_bits (op0, mode) == 1)
4582	    {
4583	      op0 = expand_compound_operation (op0);
4584	      return simplify_gen_unary (NEG, mode,
4585					 gen_lowpart_for_combine (mode, op0),
4586					 mode);
4587	    }
4588
4589	  else if (STORE_FLAG_VALUE == -1
4590		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4591		   && op1 == const0_rtx
4592		   && mode == GET_MODE (op0)
4593		   && (num_sign_bit_copies (op0, mode)
4594		       == GET_MODE_BITSIZE (mode)))
4595	    {
4596	      op0 = expand_compound_operation (op0);
4597	      return simplify_gen_unary (NOT, mode,
4598					 gen_lowpart_for_combine (mode, op0),
4599					 mode);
4600	    }
4601
4602	  /* If X is 0/1, (eq X 0) is X-1.  */
4603	  else if (STORE_FLAG_VALUE == -1
4604		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4605		   && op1 == const0_rtx
4606		   && mode == GET_MODE (op0)
4607		   && nonzero_bits (op0, mode) == 1)
4608	    {
4609	      op0 = expand_compound_operation (op0);
4610	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4611	    }
4612
4613	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4614	     one bit that might be nonzero, we can convert (ne x 0) to
4615	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4616	     AND with STORE_FLAG_VALUE when we are done, since we are only
4617	     going to test the sign bit.  */
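	  /* Sketch: in SImode with STORE_FLAG_VALUE == 0x80000000, if
	     only bit 3 of X can be nonzero, then I is 3 and (ne X 0)
	     becomes (ashift X (const_int 28)), moving bit 3 into the
	     sign bit.  */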
4618	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4619	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4620	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4621		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4622	      && op1 == const0_rtx
4623	      && mode == GET_MODE (op0)
4624	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4625	    {
4626	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4627					expand_compound_operation (op0),
4628					GET_MODE_BITSIZE (mode) - 1 - i);
4629	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4630		return XEXP (x, 0);
4631	      else
4632		return x;
4633	    }
4634
4635	  /* If the code changed, return a whole new comparison.  */
4636	  if (new_code != code)
4637	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4638
4639	  /* Otherwise, keep this operation, but maybe change its operands.
4640	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4641	  SUBST (XEXP (x, 0), op0);
4642	  SUBST (XEXP (x, 1), op1);
4643	}
4644      break;
4645
4646    case IF_THEN_ELSE:
4647      return simplify_if_then_else (x);
4648
4649    case ZERO_EXTRACT:
4650    case SIGN_EXTRACT:
4651    case ZERO_EXTEND:
4652    case SIGN_EXTEND:
4653      /* If we are processing SET_DEST, we are done.  */
4654      if (in_dest)
4655	return x;
4656
4657      return expand_compound_operation (x);
4658
4659    case SET:
4660      return simplify_set (x);
4661
4662    case AND:
4663    case IOR:
4664    case XOR:
4665      return simplify_logical (x, last);
4666
4667    case ABS:
4668      /* (abs (neg <foo>)) -> (abs <foo>) */
4669      if (GET_CODE (XEXP (x, 0)) == NEG)
4670	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4671
4672      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4673         do nothing.  */
4674      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4675	break;
4676
4677      /* If operand is something known to be positive, ignore the ABS.  */
4678      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4679	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4680	       <= HOST_BITS_PER_WIDE_INT)
4681	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4682		   & ((HOST_WIDE_INT) 1
4683		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4684		  == 0)))
4685	return XEXP (x, 0);
4686
4687      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4688      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4689	return gen_rtx_NEG (mode, XEXP (x, 0));
4690
4691      break;
4692
4693    case FFS:
4694      /* (ffs (*_extend <X>)) = (ffs <X>) */
4695      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4696	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4697	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4698      break;
4699
4700    case FLOAT:
4701      /* (float (sign_extend <X>)) = (float <X>).  */
4702      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4703	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4704      break;
4705
4706    case ASHIFT:
4707    case LSHIFTRT:
4708    case ASHIFTRT:
4709    case ROTATE:
4710    case ROTATERT:
4711      /* If this is a shift by a constant amount, simplify it.  */
4712      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4713	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4714				     INTVAL (XEXP (x, 1)));
4715
4716      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4717	SUBST (XEXP (x, 1),
4718	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4719			      ((HOST_WIDE_INT) 1
4720			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4721			      - 1,
4722			      NULL_RTX, 0));
4723      break;
4724
4725    case VEC_SELECT:
4726      {
4727	rtx op0 = XEXP (x, 0);
4728	rtx op1 = XEXP (x, 1);
4729	int len;
4730
4731	if (GET_CODE (op1) != PARALLEL)
4732	  abort ();
4733	len = XVECLEN (op1, 0);
4734	if (len == 1
4735	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4736	    && GET_CODE (op0) == VEC_CONCAT)
4737	  {
4738	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4739
4740	    /* Try to find the element in the VEC_CONCAT.  */
4741	    for (;;)
4742	      {
4743		if (GET_MODE (op0) == GET_MODE (x))
4744		  return op0;
4745		if (GET_CODE (op0) == VEC_CONCAT)
4746		  {
4747		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4748		    if (offset < op0_size)
4749		      op0 = XEXP (op0, 0);
4750		    else
4751		      {
4752			offset -= op0_size;
4753			op0 = XEXP (op0, 1);
4754		      }
4755		  }
4756		else
4757		  break;
4758	      }
4759	  }
4760      }
4761
4762      break;
4763
4764    default:
4765      break;
4766    }
4767
4768  return x;
4769}
4770
4771/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4772
4773static rtx
4774simplify_if_then_else (x)
4775     rtx x;
4776{
4777  enum machine_mode mode = GET_MODE (x);
4778  rtx cond = XEXP (x, 0);
4779  rtx true_rtx = XEXP (x, 1);
4780  rtx false_rtx = XEXP (x, 2);
4781  enum rtx_code true_code = GET_CODE (cond);
4782  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4783  rtx temp;
4784  int i;
4785  enum rtx_code false_code;
4786  rtx reversed;
4787
4788  /* Simplify storing of the truth value.  */
4789  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4790    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4791
4792  /* Also when the truth value has to be reversed.  */
4793  if (comparison_p
4794      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4795      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4796					  XEXP (cond, 1))))
4797    return reversed;
4798
4799  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4800     in it is being compared against certain values.  Get the true and false
4801     comparisons and see if that says anything about the value of each arm.  */
4802
4803  if (comparison_p
4804      && ((false_code = combine_reversed_comparison_code (cond))
4805	  != UNKNOWN)
4806      && GET_CODE (XEXP (cond, 0)) == REG)
4807    {
4808      HOST_WIDE_INT nzb;
4809      rtx from = XEXP (cond, 0);
4810      rtx true_val = XEXP (cond, 1);
4811      rtx false_val = true_val;
4812      int swapped = 0;
4813
4814      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4815
4816      if (false_code == EQ)
4817	{
4818	  swapped = 1, true_code = EQ, false_code = NE;
4819	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4820	}
4821
4822      /* If we are comparing against zero and the expression being tested has
4823	 only a single bit that might be nonzero, that is its value when it is
4824	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
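      /* E.g. if only bit 2 of FROM can be nonzero and the condition is
	 (eq FROM 0), then in the false arm FROM must be 4, so
	 known_cond can substitute that constant there.  */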
4825
4826      if (true_code == EQ && true_val == const0_rtx
4827	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4828	false_code = EQ, false_val = GEN_INT (nzb);
4829      else if (true_code == EQ && true_val == const0_rtx
4830	       && (num_sign_bit_copies (from, GET_MODE (from))
4831		   == GET_MODE_BITSIZE (GET_MODE (from))))
4832	false_code = EQ, false_val = constm1_rtx;
4833
4834      /* Now simplify an arm if we know the value of the register in the
4835	 branch and it is used in the arm.  Be careful due to the potential
4836	 of locally-shared RTL.  */
4837
4838      if (reg_mentioned_p (from, true_rtx))
4839	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4840				      from, true_val),
4841		      pc_rtx, pc_rtx, 0, 0);
4842      if (reg_mentioned_p (from, false_rtx))
4843	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4844				   from, false_val),
4845		       pc_rtx, pc_rtx, 0, 0);
4846
4847      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4848      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4849
4850      true_rtx = XEXP (x, 1);
4851      false_rtx = XEXP (x, 2);
4852      true_code = GET_CODE (cond);
4853    }
4854
4855  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4856     reversed, do so to avoid needing two sets of patterns for
4857     subtract-and-branch insns.  Similarly if we have a constant in the true
4858     arm, the false arm is the same as the first operand of the comparison, or
4859     the false arm is more complicated than the true arm.  */
4860
4861  if (comparison_p
4862      && combine_reversed_comparison_code (cond) != UNKNOWN
4863      && (true_rtx == pc_rtx
4864	  || (CONSTANT_P (true_rtx)
4865	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4866	  || true_rtx == const0_rtx
4867	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4868	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4869	  || (GET_CODE (true_rtx) == SUBREG
4870	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4871	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4872	  || reg_mentioned_p (true_rtx, false_rtx)
4873	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4874    {
4875      true_code = reversed_comparison_code (cond, NULL);
4876      SUBST (XEXP (x, 0),
4877	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4878				  XEXP (cond, 1)));
4879
4880      SUBST (XEXP (x, 1), false_rtx);
4881      SUBST (XEXP (x, 2), true_rtx);
4882
4883      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4884      cond = XEXP (x, 0);
4885
4886      /* It is possible that the conditional has been simplified out.  */
4887      true_code = GET_CODE (cond);
4888      comparison_p = GET_RTX_CLASS (true_code) == '<';
4889    }
4890
4891  /* If the two arms are identical, we don't need the comparison.  */
4892
4893  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4894    return true_rtx;
4895
4896  /* Convert a == b ? b : a to "a".  */
4897  if (true_code == EQ && ! side_effects_p (cond)
4898      && !HONOR_NANS (mode)
4899      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4900      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4901    return false_rtx;
4902  else if (true_code == NE && ! side_effects_p (cond)
4903	   && !HONOR_NANS (mode)
4904	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4905	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4906    return true_rtx;
4907
4908  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4909
4910  if (GET_MODE_CLASS (mode) == MODE_INT
4911      && GET_CODE (false_rtx) == NEG
4912      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4913      && comparison_p
4914      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4915      && ! side_effects_p (true_rtx))
4916    switch (true_code)
4917      {
4918      case GT:
4919      case GE:
4920	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4921      case LT:
4922      case LE:
4923	return
4924	  simplify_gen_unary (NEG, mode,
4925			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4926			      mode);
4927      default:
4928	break;
4929      }
4930
4931  /* Look for MIN or MAX.  */
4932
4933  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4934      && comparison_p
4935      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4936      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4937      && ! side_effects_p (cond))
4938    switch (true_code)
4939      {
4940      case GE:
4941      case GT:
4942	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4943      case LE:
4944      case LT:
4945	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4946      case GEU:
4947      case GTU:
4948	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4949      case LEU:
4950      case LTU:
4951	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4952      default:
4953	break;
4954      }
4955
4956  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4957     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4958     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4959     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4960     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4961     neither 1 nor -1, but it isn't worth checking for.  */
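  /* Sketch with STORE_FLAG_VALUE == 1:
     (if_then_else (ne A B) (plus Z (const_int 4)) Z) becomes
     (plus Z (mult (ne A B) (const_int 4))); the multiply contributes 4
     when the condition holds and 0 when it does not.  */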
4962
4963  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4964      && comparison_p
4965      && GET_MODE_CLASS (mode) == MODE_INT
4966      && ! side_effects_p (x))
4967    {
4968      rtx t = make_compound_operation (true_rtx, SET);
4969      rtx f = make_compound_operation (false_rtx, SET);
4970      rtx cond_op0 = XEXP (cond, 0);
4971      rtx cond_op1 = XEXP (cond, 1);
4972      enum rtx_code op = NIL, extend_op = NIL;
4973      enum machine_mode m = mode;
4974      rtx z = 0, c1 = NULL_RTX;
4975
4976      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4977	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4978	   || GET_CODE (t) == ASHIFT
4979	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4980	  && rtx_equal_p (XEXP (t, 0), f))
4981	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4982
4983      /* If an identity-zero op is commutative, check whether there
4984	 would be a match if we swapped the operands.  */
4985      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4986		|| GET_CODE (t) == XOR)
4987	       && rtx_equal_p (XEXP (t, 1), f))
4988	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4989      else if (GET_CODE (t) == SIGN_EXTEND
4990	       && (GET_CODE (XEXP (t, 0)) == PLUS
4991		   || GET_CODE (XEXP (t, 0)) == MINUS
4992		   || GET_CODE (XEXP (t, 0)) == IOR
4993		   || GET_CODE (XEXP (t, 0)) == XOR
4994		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4995		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4996		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4997	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4998	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4999	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5000	       && (num_sign_bit_copies (f, GET_MODE (f))
5001		   > (unsigned int)
5002		     (GET_MODE_BITSIZE (mode)
5003		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5004	{
5005	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5006	  extend_op = SIGN_EXTEND;
5007	  m = GET_MODE (XEXP (t, 0));
5008	}
5009      else if (GET_CODE (t) == SIGN_EXTEND
5010	       && (GET_CODE (XEXP (t, 0)) == PLUS
5011		   || GET_CODE (XEXP (t, 0)) == IOR
5012		   || GET_CODE (XEXP (t, 0)) == XOR)
5013	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5014	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5015	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5016	       && (num_sign_bit_copies (f, GET_MODE (f))
5017		   > (unsigned int)
5018		     (GET_MODE_BITSIZE (mode)
5019		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5020	{
5021	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5022	  extend_op = SIGN_EXTEND;
5023	  m = GET_MODE (XEXP (t, 0));
5024	}
5025      else if (GET_CODE (t) == ZERO_EXTEND
5026	       && (GET_CODE (XEXP (t, 0)) == PLUS
5027		   || GET_CODE (XEXP (t, 0)) == MINUS
5028		   || GET_CODE (XEXP (t, 0)) == IOR
5029		   || GET_CODE (XEXP (t, 0)) == XOR
5030		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5031		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5032		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5033	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5034	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5035	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5036	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5037	       && ((nonzero_bits (f, GET_MODE (f))
5038		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5039		   == 0))
5040	{
5041	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5042	  extend_op = ZERO_EXTEND;
5043	  m = GET_MODE (XEXP (t, 0));
5044	}
5045      else if (GET_CODE (t) == ZERO_EXTEND
5046	       && (GET_CODE (XEXP (t, 0)) == PLUS
5047		   || GET_CODE (XEXP (t, 0)) == IOR
5048		   || GET_CODE (XEXP (t, 0)) == XOR)
5049	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5050	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5051	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5052	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5053	       && ((nonzero_bits (f, GET_MODE (f))
5054		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5055		   == 0))
5056	{
5057	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5058	  extend_op = ZERO_EXTEND;
5059	  m = GET_MODE (XEXP (t, 0));
5060	}
5061
5062      if (z)
5063	{
5064	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
5065			pc_rtx, pc_rtx, 0, 0);
5066	  temp = gen_binary (MULT, m, temp,
5067			     gen_binary (MULT, m, c1, const_true_rtx));
5068	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5069	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
5070
5071	  if (extend_op != NIL)
5072	    temp = simplify_gen_unary (extend_op, mode, temp, m);
5073
5074	  return temp;
5075	}
5076    }
5077
5078  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5079     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5080     negation of a single bit, we can convert this operation to a shift.  We
5081     can actually do this more generally, but it doesn't seem worth it.  */
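  /* E.g. if A is known to be 0 or 1,
     (if_then_else (ne A 0) (const_int 8) (const_int 0)) is just
     (ashift A (const_int 3)).  */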
5082
5083  if (true_code == NE && XEXP (cond, 1) == const0_rtx
5084      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5085      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5086	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5087	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5088	       == GET_MODE_BITSIZE (mode))
5089	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5090    return
5091      simplify_shift_const (NULL_RTX, ASHIFT, mode,
5092			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
5093
5094  return x;
5095}
5096
5097/* Simplify X, a SET expression.  Return the new expression.  */
5098
5099static rtx
5100simplify_set (x)
5101     rtx x;
5102{
5103  rtx src = SET_SRC (x);
5104  rtx dest = SET_DEST (x);
5105  enum machine_mode mode
5106    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5107  rtx other_insn;
5108  rtx *cc_use;
5109
5110  /* (set (pc) (return)) gets written as (return).  */
5111  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5112    return src;
5113
5114  /* Now that we know for sure which bits of SRC we are using, see if we can
5115     simplify the expression for the object knowing that we only need the
5116     low-order bits.  */
5117
5118  if (GET_MODE_CLASS (mode) == MODE_INT
5119      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5120    {
5121      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
5122      SUBST (SET_SRC (x), src);
5123    }
5124
5125  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5126     the comparison result and try to simplify it unless we already have used
5127     undobuf.other_insn.  */
5128  if ((GET_MODE_CLASS (mode) == MODE_CC
5129       || GET_CODE (src) == COMPARE
5130       || CC0_P (dest))
5131      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5132      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5133      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
5134      && rtx_equal_p (XEXP (*cc_use, 0), dest))
5135    {
5136      enum rtx_code old_code = GET_CODE (*cc_use);
5137      enum rtx_code new_code;
5138      rtx op0, op1, tmp;
5139      int other_changed = 0;
5140      enum machine_mode compare_mode = GET_MODE (dest);
5141      enum machine_mode tmp_mode;
5142
5143      if (GET_CODE (src) == COMPARE)
5144	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5145      else
5146	op0 = src, op1 = const0_rtx;
5147
5148      /* Check whether the comparison is known at compile time.  */
5149      if (GET_MODE (op0) != VOIDmode)
5150	tmp_mode = GET_MODE (op0);
5151      else if (GET_MODE (op1) != VOIDmode)
5152	tmp_mode = GET_MODE (op1);
5153      else
5154	tmp_mode = compare_mode;
5155      tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1);
5156      if (tmp != NULL_RTX)
5157	{
5158	  rtx pat = PATTERN (other_insn);
5159	  undobuf.other_insn = other_insn;
5160	  SUBST (*cc_use, tmp);
5161
5162	  /* Attempt to simplify CC user.  */
5163	  if (GET_CODE (pat) == SET)
5164	    {
5165	      rtx new = simplify_rtx (SET_SRC (pat));
5166	      if (new != NULL_RTX)
5167		SUBST (SET_SRC (pat), new);
5168	    }
5169
5170	  /* Convert X into a no-op move.  */
5171	  SUBST (SET_DEST (x), pc_rtx);
5172	  SUBST (SET_SRC (x), pc_rtx);
5173	  return x;
5174	}
5175
5176      /* Simplify our comparison, if possible.  */
5177      new_code = simplify_comparison (old_code, &op0, &op1);
5178
5179#ifdef EXTRA_CC_MODES
5180      /* If this machine has CC modes other than CCmode, check to see if we
5181	 need to use a different CC mode here.  */
5182      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5183#endif /* EXTRA_CC_MODES */
5184
5185#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5186      /* If the mode changed, we have to change SET_DEST, the mode in the
5187	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5188	 a hard register, just build new versions with the proper mode.  If it
5189	 is a pseudo, we lose unless it is the only time we set the pseudo, in
5190	 which case we can safely change its mode.  */
5191      if (compare_mode != GET_MODE (dest))
5192	{
5193	  unsigned int regno = REGNO (dest);
5194	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
5195
5196	  if (regno < FIRST_PSEUDO_REGISTER
5197	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5198	    {
5199	      if (regno >= FIRST_PSEUDO_REGISTER)
5200		SUBST (regno_reg_rtx[regno], new_dest);
5201
5202	      SUBST (SET_DEST (x), new_dest);
5203	      SUBST (XEXP (*cc_use, 0), new_dest);
5204	      other_changed = 1;
5205
5206	      dest = new_dest;
5207	    }
5208	}
5209#endif
5210
5211      /* If the code changed, we have to build a new comparison in
5212	 undobuf.other_insn.  */
5213      if (new_code != old_code)
5214	{
5215	  unsigned HOST_WIDE_INT mask;
5216
5217	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5218					  dest, const0_rtx));
5219
5220	  /* If the only change we made was to change an EQ into an NE or
5221	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5222	     is zero, check if changing the user of the condition code will
5223	     produce a valid insn.  If it won't, we can keep the original code
5224	     in that insn by surrounding our operation with an XOR.  */
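	  /* E.g. if OP0 can have only bit 0 nonzero, (ne OP0 0) and
	     (eq (xor OP0 1) 0) are equivalent, so the XOR lets the user
	     of the condition code keep its original EQ or NE.  */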
5225
5226	  if (((old_code == NE && new_code == EQ)
5227	       || (old_code == EQ && new_code == NE))
5228	      && ! other_changed && op1 == const0_rtx
5229	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5230	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5231	    {
5232	      rtx pat = PATTERN (other_insn), note = 0;
5233
5234	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5235		   && ! check_asm_operands (pat)))
5236		{
5237		  PUT_CODE (*cc_use, old_code);
5238		  other_insn = 0;
5239
5240		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5241		}
5242	    }
5243
5244	  other_changed = 1;
5245	}
5246
5247      if (other_changed)
5248	undobuf.other_insn = other_insn;
5249
5250#ifdef HAVE_cc0
5251      /* If we are now comparing against zero, change our source if
5252	 needed.  If we do not use cc0, we always have a COMPARE.  */
5253      if (op1 == const0_rtx && dest == cc0_rtx)
5254	{
5255	  SUBST (SET_SRC (x), op0);
5256	  src = op0;
5257	}
5258      else
5259#endif
5260
5261      /* Otherwise, if we didn't previously have a COMPARE in the
5262	 correct mode, we need one.  */
5263      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5264	{
5265	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5266	  src = SET_SRC (x);
5267	}
5268      else
5269	{
5270	  /* Otherwise, update the COMPARE if needed.  */
5271	  SUBST (XEXP (src, 0), op0);
5272	  SUBST (XEXP (src, 1), op1);
5273	}
5274    }
5275  else
5276    {
5277      /* Get SET_SRC in a form where we have placed back any
5278	 compound expressions.  Then do the checks below.  */
5279      src = make_compound_operation (src, SET);
5280      SUBST (SET_SRC (x), src);
5281    }
5282
5283  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5284     and X being a REG or (subreg (reg)), we may be able to convert this to
5285     (set (subreg:m2 x) (op)).
5286
5287     We can always do this if M1 is narrower than M2 because that means that
5288     we only care about the low bits of the result.
5289
5290     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5291     perform a narrower operation than requested since the high-order bits will
5292	 be undefined.  On machines where it is defined, this transformation is safe
5293     as long as M1 and M2 have the same number of words.  */
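  /* Illustrative instance: (set X (subreg:QI (plus:SI A B) 0)) with X
     a QImode register can become (set (subreg:SI X 0) (plus:SI A B)),
     since only the low byte of the PLUS result is needed.  */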
5294
5295  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5296      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5297      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5298	   / UNITS_PER_WORD)
5299	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5300	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5301#ifndef WORD_REGISTER_OPERATIONS
5302      && (GET_MODE_SIZE (GET_MODE (src))
5303	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5304#endif
5305#ifdef CANNOT_CHANGE_MODE_CLASS
5306      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5307	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5308					 GET_MODE (SUBREG_REG (src)),
5309					 GET_MODE (src)))
5310#endif
5311      && (GET_CODE (dest) == REG
5312	  || (GET_CODE (dest) == SUBREG
5313	      && GET_CODE (SUBREG_REG (dest)) == REG)))
5314    {
5315      SUBST (SET_DEST (x),
5316	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5317				      dest));
5318      SUBST (SET_SRC (x), SUBREG_REG (src));
5319
5320      src = SET_SRC (x), dest = SET_DEST (x);
5321    }
5322
5323#ifdef HAVE_cc0
5324  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5325     in SRC.  */
5326  if (dest == cc0_rtx
5327      && GET_CODE (src) == SUBREG
5328      && subreg_lowpart_p (src)
5329      && (GET_MODE_BITSIZE (GET_MODE (src))
5330	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5331    {
5332      rtx inner = SUBREG_REG (src);
5333      enum machine_mode inner_mode = GET_MODE (inner);
5334
5335      /* Here we make sure that we don't have a sign bit on.  */
5336      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5337	  && (nonzero_bits (inner, inner_mode)
5338	      < ((unsigned HOST_WIDE_INT) 1
5339		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5340	{
5341	  SUBST (SET_SRC (x), inner);
5342	  src = SET_SRC (x);
5343	}
5344    }
5345#endif
5346
5347#ifdef LOAD_EXTEND_OP
5348  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5349     would require a paradoxical subreg.  Replace the subreg with a
5350     zero_extend to avoid the reload that would otherwise be required.  */
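  /* E.g. on a machine where LOAD_EXTEND_OP (QImode) is ZERO_EXTEND,
     (subreg:SI (mem:QI ...) 0) is rewritten as
     (zero_extend:SI (mem:QI ...)).  */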
5351
5352  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5353      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5354      && SUBREG_BYTE (src) == 0
5355      && (GET_MODE_SIZE (GET_MODE (src))
5356	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5357      && GET_CODE (SUBREG_REG (src)) == MEM)
5358    {
5359      SUBST (SET_SRC (x),
5360	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5361		      GET_MODE (src), SUBREG_REG (src)));
5362
5363      src = SET_SRC (x);
5364    }
5365#endif
5366
5367  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5368     are comparing an item known to be 0 or -1 against 0, use a logical
5369     operation instead. Check for one of the arms being an IOR of the other
5370     arm with some value.  We compute three terms to be IOR'ed together.  In
5371     practice, at most two will be nonzero.  Then we do the IOR's.  */
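  /* Sketch: when A is known to be 0 or -1,
     (if_then_else (ne A 0) B C) can be computed as
     (ior (and A B) (and (not A) C)); TERM1 covers the case where one
     arm is an IOR involving the other.  */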
5372
5373  if (GET_CODE (dest) != PC
5374      && GET_CODE (src) == IF_THEN_ELSE
5375      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5376      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5377      && XEXP (XEXP (src, 0), 1) == const0_rtx
5378      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5379#ifdef HAVE_conditional_move
5380      && ! can_conditionally_move_p (GET_MODE (src))
5381#endif
5382      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5383			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5384	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5385      && ! side_effects_p (src))
5386    {
5387      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5388		      ? XEXP (src, 1) : XEXP (src, 2));
5389      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5390		   ? XEXP (src, 2) : XEXP (src, 1));
5391      rtx term1 = const0_rtx, term2, term3;
5392
5393      if (GET_CODE (true_rtx) == IOR
5394	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5395	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5396      else if (GET_CODE (true_rtx) == IOR
5397	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5398	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5399      else if (GET_CODE (false_rtx) == IOR
5400	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5401	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5402      else if (GET_CODE (false_rtx) == IOR
5403	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5404	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5405
5406      term2 = gen_binary (AND, GET_MODE (src),
5407			  XEXP (XEXP (src, 0), 0), true_rtx);
5408      term3 = gen_binary (AND, GET_MODE (src),
5409			  simplify_gen_unary (NOT, GET_MODE (src),
5410					      XEXP (XEXP (src, 0), 0),
5411					      GET_MODE (src)),
5412			  false_rtx);
5413
5414      SUBST (SET_SRC (x),
5415	     gen_binary (IOR, GET_MODE (src),
5416			 gen_binary (IOR, GET_MODE (src), term1, term2),
5417			 term3));
5418
5419      src = SET_SRC (x);
5420    }
5421
5422  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5423     whole thing fail.  */
5424  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5425    return src;
5426  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5427    return dest;
5428  else
5429    /* Convert this into a field assignment operation, if possible.  */
5430    return make_field_assignment (x);
5431}
5432
5433/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5434   result.  LAST is nonzero if this is the last retry.  */
5435
5436static rtx
5437simplify_logical (x, last)
5438     rtx x;
5439     int last;
5440{
5441  enum machine_mode mode = GET_MODE (x);
5442  rtx op0 = XEXP (x, 0);
5443  rtx op1 = XEXP (x, 1);
5444  rtx reversed;
5445
5446  switch (GET_CODE (x))
5447    {
5448    case AND:
5449      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5450	 insn (and may simplify more).  */
5451      if (GET_CODE (op0) == XOR
5452	  && rtx_equal_p (XEXP (op0, 0), op1)
5453	  && ! side_effects_p (op1))
5454	x = gen_binary (AND, mode,
5455			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5456			op1);
5457
5458      if (GET_CODE (op0) == XOR
5459	  && rtx_equal_p (XEXP (op0, 1), op1)
5460	  && ! side_effects_p (op1))
5461	x = gen_binary (AND, mode,
5462			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5463			op1);
5464
5465      /* Similarly for (~(A ^ B)) & A.  */
5466      if (GET_CODE (op0) == NOT
5467	  && GET_CODE (XEXP (op0, 0)) == XOR
5468	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5469	  && ! side_effects_p (op1))
5470	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5471
5472      if (GET_CODE (op0) == NOT
5473	  && GET_CODE (XEXP (op0, 0)) == XOR
5474	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5475	  && ! side_effects_p (op1))
5476	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5477
5478      /* We can call simplify_and_const_int only if we don't lose
5479	 any (sign) bits when converting INTVAL (op1) to
5480	 "unsigned HOST_WIDE_INT".  */
5481      if (GET_CODE (op1) == CONST_INT
5482	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5483	      || INTVAL (op1) > 0))
5484	{
5485	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5486
5487	  /* If we have (ior (and X C1) C2) and the next restart would be
5488	     the last, simplify this by making C1 as small as possible
5489	     and then exit.  */
5490	  if (last
5491	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5492	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
5493	      && GET_CODE (op1) == CONST_INT)
5494	    return gen_binary (IOR, mode,
5495			       gen_binary (AND, mode, XEXP (op0, 0),
5496					   GEN_INT (INTVAL (XEXP (op0, 1))
5497						    & ~INTVAL (op1))), op1);
5498
5499	  if (GET_CODE (x) != AND)
5500	    return x;
5501
5502	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5503	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
5504	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5505	}
5506
5507      /* Convert (A | B) & A to A.  */
5508      if (GET_CODE (op0) == IOR
5509	  && (rtx_equal_p (XEXP (op0, 0), op1)
5510	      || rtx_equal_p (XEXP (op0, 1), op1))
5511	  && ! side_effects_p (XEXP (op0, 0))
5512	  && ! side_effects_p (XEXP (op0, 1)))
5513	return op1;
5514
5515      /* In the following group of tests (and those in case IOR below),
5516	 we start with some combination of logical operations and apply
5517	 the distributive law followed by the inverse distributive law.
5518	 Most of the time, this results in no change.  However, if some of
5519	 the operands are the same or inverses of each other, simplifications
5520	 will result.
5521
5522	 For example, (and (ior A B) (not B)) can occur as the result of
5523	 expanding a bit field assignment.  When we apply the distributive
5524	 law to this, we get (ior (and A (not B)) (and B (not B))),
5525	 which then simplifies to (and A (not B)).
5526
5527	 If we have (and (ior A B) C), apply the distributive law and then
5528	 the inverse distributive law to see if things simplify.  */
5529
5530      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5531	{
5532	  x = apply_distributive_law
5533	    (gen_binary (GET_CODE (op0), mode,
5534			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5535			 gen_binary (AND, mode, XEXP (op0, 1),
5536				     copy_rtx (op1))));
5537	  if (GET_CODE (x) != AND)
5538	    return x;
5539	}
5540
5541      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5542	return apply_distributive_law
5543	  (gen_binary (GET_CODE (op1), mode,
5544		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5545		       gen_binary (AND, mode, XEXP (op1, 1),
5546				   copy_rtx (op0))));
5547
5548      /* Similarly, taking advantage of the fact that
5549	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
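      /* The identity can be checked bit by bit: where a bit of A is 0
	 both sides reduce to (xor B C); where it is 1 both sides
	 are 0.  */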
5550
5551      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5552	return apply_distributive_law
5553	  (gen_binary (XOR, mode,
5554		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5555		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5556				   XEXP (op1, 1))));
5557
5558      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5559	return apply_distributive_law
5560	  (gen_binary (XOR, mode,
5561		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5562		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5563      break;
5564
5565    case IOR:
5566      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5567      if (GET_CODE (op1) == CONST_INT
5568	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5569	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5570	return op1;
5571
5572      /* Convert (A & B) | A to A.  */
5573      if (GET_CODE (op0) == AND
5574	  && (rtx_equal_p (XEXP (op0, 0), op1)
5575	      || rtx_equal_p (XEXP (op0, 1), op1))
5576	  && ! side_effects_p (XEXP (op0, 0))
5577	  && ! side_effects_p (XEXP (op0, 1)))
5578	return op1;
5579
5580      /* If we have (ior (and A B) C), apply the distributive law and then
5581	 the inverse distributive law to see if things simplify.  */
5582
5583      if (GET_CODE (op0) == AND)
5584	{
5585	  x = apply_distributive_law
5586	    (gen_binary (AND, mode,
5587			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5588			 gen_binary (IOR, mode, XEXP (op0, 1),
5589				     copy_rtx (op1))));
5590
5591	  if (GET_CODE (x) != IOR)
5592	    return x;
5593	}
5594
5595      if (GET_CODE (op1) == AND)
5596	{
5597	  x = apply_distributive_law
5598	    (gen_binary (AND, mode,
5599			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5600			 gen_binary (IOR, mode, XEXP (op1, 1),
5601				     copy_rtx (op0))));
5602
5603	  if (GET_CODE (x) != IOR)
5604	    return x;
5605	}
5606
5607      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5608	 mode size to (rotate A CX).  */
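      /* E.g. in SImode, (ior (ashift A (const_int 8))
	 (lshiftrt A (const_int 24))) becomes (rotate A (const_int 8)),
	 since 8 + 24 is the mode size.  */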
5609
5610      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5611	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5612	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5613	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5614	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5615	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5616	      == GET_MODE_BITSIZE (mode)))
5617	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5618			       (GET_CODE (op0) == ASHIFT
5619				? XEXP (op0, 1) : XEXP (op1, 1)));
5620
5621      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5622	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5623	 does not affect any of the bits in OP1, it can really be done
5624	 as a PLUS and we can associate.  We do this by seeing if OP1
5625	 can be safely shifted left C bits.  */
5626      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5627	  && GET_CODE (XEXP (op0, 0)) == PLUS
5628	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5629	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5630	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5631	{
5632	  int count = INTVAL (XEXP (op0, 1));
5633	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5634
5635	  if (mask >> count == INTVAL (op1)
5636	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5637	    {
5638	      SUBST (XEXP (XEXP (op0, 0), 1),
5639		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5640	      return op0;
5641	    }
5642	}
5643      break;
5644
5645    case XOR:
5646      /* If we are XORing two things that have no bits in common,
5647	 convert them into an IOR.  This helps to detect rotations encoded
5648	 this way and enables other simplifications.  */
5649
5650      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5651	  && (nonzero_bits (op0, mode)
5652	      & nonzero_bits (op1, mode)) == 0)
5653	return (gen_binary (IOR, mode, op0, op1));
5654
5655      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5656	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5657	 (NOT y).  */
5658      {
5659	int num_negated = 0;
5660
5661	if (GET_CODE (op0) == NOT)
5662	  num_negated++, op0 = XEXP (op0, 0);
5663	if (GET_CODE (op1) == NOT)
5664	  num_negated++, op1 = XEXP (op1, 0);
5665
5666	if (num_negated == 2)
5667	  {
5668	    SUBST (XEXP (x, 0), op0);
5669	    SUBST (XEXP (x, 1), op1);
5670	  }
5671	else if (num_negated == 1)
5672	  return
5673	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5674				mode);
5675      }
5676
5677      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5678	 correspond to a machine insn or result in further simplifications
5679	 if B is a constant.  */
5680
5681      if (GET_CODE (op0) == AND
5682	  && rtx_equal_p (XEXP (op0, 1), op1)
5683	  && ! side_effects_p (op1))
5684	return gen_binary (AND, mode,
5685			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5686			   op1);
5687
5688      else if (GET_CODE (op0) == AND
5689	       && rtx_equal_p (XEXP (op0, 0), op1)
5690	       && ! side_effects_p (op1))
5691	return gen_binary (AND, mode,
5692			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5693			   op1);
5694
5695      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5696	 comparison if STORE_FLAG_VALUE is 1.  */
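      /* E.g. (xor (eq A B) (const_int 1)) flips the 0/1 flag value and
	 is exactly (ne A B).  */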
5697      if (STORE_FLAG_VALUE == 1
5698	  && op1 == const1_rtx
5699	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5700	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5701					      XEXP (op0, 1))))
5702	return reversed;
5703
5704      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5705	 is (lt foo (const_int 0)), so we can perform the above
5706	 simplification if STORE_FLAG_VALUE is 1.  */
5707
5708      if (STORE_FLAG_VALUE == 1
5709	  && op1 == const1_rtx
5710	  && GET_CODE (op0) == LSHIFTRT
5711	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5712	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5713	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5714
5715      /* (xor (comparison foo bar) (const_int sign-bit))
5716	 when STORE_FLAG_VALUE is the sign bit.  */
5717      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5718	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5719	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5720	  && op1 == const_true_rtx
5721	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5722	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5723					      XEXP (op0, 1))))
5724	return reversed;
5725
5726      break;
5727
5728    default:
5729      abort ();
5730    }
5731
5732  return x;
5733}
5734
5735/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5736   operations" because they can be replaced with two more basic operations.
5737   ZERO_EXTEND is also considered "compound" because it can be replaced with
5738   an AND operation, which is simpler, though only one operation.
5739
5740   The function expand_compound_operation is called with an rtx expression
5741   and will convert it to the appropriate shifts and AND operations,
5742   simplifying at each stage.
5743
5744   The function make_compound_operation is called to convert an expression
5745   consisting of shifts and ANDs into the equivalent compound expression.
5746   It is the inverse of this function, loosely speaking.  */
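/* For example, in SImode on a !BITS_BIG_ENDIAN target,
   (sign_extract:SI X 8 0) expands to (ashiftrt (ashift X 24) 24),
   while a ZERO_EXTEND of a QImode value expands to an AND with 255.  */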
5747
5748static rtx
5749expand_compound_operation (x)
5750     rtx x;
5751{
5752  unsigned HOST_WIDE_INT pos = 0, len;
5753  int unsignedp = 0;
5754  unsigned int modewidth;
5755  rtx tem;
5756
5757  switch (GET_CODE (x))
5758    {
5759    case ZERO_EXTEND:
5760      unsignedp = 1;
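      /* ... fall through to share the SIGN_EXTEND code ...  */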
5761    case SIGN_EXTEND:
5762      /* We can't necessarily use a const_int for a multiword mode;
5763	 it depends on implicitly extending the value.
5764	 Since we don't know the right way to extend it,
5765	 we can't tell whether the implicit way is right.
5766
5767	 Even for a mode that is no wider than a const_int,
5768	 we can't win, because we need to sign extend one of its bits through
5769	 the rest of it, and we don't know which bit.  */
5770      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5771	return x;
5772
5773      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5774	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5775	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5776	 reloaded.  If not for that, MEMs would very rarely be safe.
5777
5778	 Reject MODEs bigger than a word, because we might not be able
5779	 to reference a two-register group starting with an arbitrary register
5780	 (and currently gen_lowpart might crash for a SUBREG).  */
5781
5782      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5783	return x;
5784
5785      /* Reject MODEs that aren't scalar integers because turning vector
5786	 or complex modes into shifts causes problems.  */
5787
5788      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5789	return x;
5790
5791      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5792      /* If the inner object has VOIDmode (the only way this can happen
5793	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5794	 know how much masking to do.  */
5795      if (len == 0)
5796	return x;
5797
5798      break;
5799
5800    case ZERO_EXTRACT:
5801      unsignedp = 1;
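      /* ... fall through to share the SIGN_EXTRACT code ...  */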
5802    case SIGN_EXTRACT:
5803      /* If the operand is a CLOBBER, just return it.  */
5804      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5805	return XEXP (x, 0);
5806
5807      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5808	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5809	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5810	return x;
5811
5812      /* Reject MODEs that aren't scalar integers because turning vector
5813	 or complex modes into shifts causes problems.  */
5814
5815      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5816	return x;
5817
5818      len = INTVAL (XEXP (x, 1));
5819      pos = INTVAL (XEXP (x, 2));
5820
5821      /* If this goes outside the object being extracted, replace the object
5822	 with a (use (mem ...)) construct that only combine understands
5823	 and is used only for this purpose.  */
5824      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5825	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5826
5827      if (BITS_BIG_ENDIAN)
5828	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5829
5830      break;
5831
5832    default:
5833      return x;
5834    }
5835  /* Convert sign extension to zero extension, if we know that the high
5836     bit is not set, as this is easier to optimize.  It will be converted
5837     back to a cheaper alternative in make_extraction.  */
5838  if (GET_CODE (x) == SIGN_EXTEND
5839      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5840	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5841		& ~(((unsigned HOST_WIDE_INT)
5842		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5843		     >> 1))
5844	       == 0)))
5845    {
5846      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5847      return expand_compound_operation (temp);
5848    }
5849
5850  /* We can optimize some special cases of ZERO_EXTEND.  */
5851  if (GET_CODE (x) == ZERO_EXTEND)
5852    {
5853      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5854         know that the last value didn't have any inappropriate bits
5855         set.  */
5856      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5857	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5858	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5859	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5860	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5861	return XEXP (XEXP (x, 0), 0);
5862
5863      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5864      if (GET_CODE (XEXP (x, 0)) == SUBREG
5865	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5866	  && subreg_lowpart_p (XEXP (x, 0))
5867	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5868	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5869	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5870	return SUBREG_REG (XEXP (x, 0));
5871
5872      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5873         is a comparison and STORE_FLAG_VALUE permits.  This is like
5874         the first case, but it works even when GET_MODE (x) is larger
5875         than HOST_WIDE_INT.  */
5876      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5877	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5878	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5879	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5880	      <= HOST_BITS_PER_WIDE_INT)
5881	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5882	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5883	return XEXP (XEXP (x, 0), 0);
5884
5885      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5886      if (GET_CODE (XEXP (x, 0)) == SUBREG
5887	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5888	  && subreg_lowpart_p (XEXP (x, 0))
5889	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5890	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5891	      <= HOST_BITS_PER_WIDE_INT)
5892	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5893	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5894	return SUBREG_REG (XEXP (x, 0));
5895
5896    }
5897
5898  /* If we reach here, we want to return a pair of shifts.  The inner
5899     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5900     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5901     logical depending on the value of UNSIGNEDP.
5902
5903     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5904     converted into an AND of a shift.
5905
     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to
     produce such a position.  */
5912
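  /* For example (illustrative), with BITSIZE == 32, POS == 8, LEN == 8
     and UNSIGNEDP == 0, the result is
	 (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)),
     since 32 - 8 - 8 == 16 and 32 - 8 == 24.  */
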
5913  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5914  if (modewidth + len >= pos)
5915    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5916				GET_MODE (x),
5917				simplify_shift_const (NULL_RTX, ASHIFT,
5918						      GET_MODE (x),
5919						      XEXP (x, 0),
5920						      modewidth - pos - len),
5921				modewidth - len);
5922
5923  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5924    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5925				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5926							GET_MODE (x),
5927							XEXP (x, 0), pos),
5928				  ((HOST_WIDE_INT) 1 << len) - 1);
5929  else
5930    /* Any other cases we can't handle.  */
5931    return x;
5932
5933  /* If we couldn't do this for some reason, return the original
5934     expression.  */
5935  if (GET_CODE (tem) == CLOBBER)
5936    return x;
5937
5938  return tem;
5939}
5940
5941/* X is a SET which contains an assignment of one object into
5942   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5943   or certain SUBREGS). If possible, convert it into a series of
5944   logical operations.
5945
5946   We half-heartedly support variable positions, but do not at all
5947   support variable lengths.  */
5948
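/* For example (illustrative), a constant-position bit-field store
       (set (zero_extract:SI X (const_int 8) (const_int 4)) Y)
   expands to
       (set X (ior:SI (and:SI (not:SI (ashift:SI (const_int 255)
						  (const_int 4)))
			      X)
		      (ashift:SI (and:SI Y (const_int 255))
				 (const_int 4))))
   i.e. X = (X & ~(MASK << POS)) | ((Y & MASK) << POS).  */
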
5949static rtx
5950expand_field_assignment (x)
5951     rtx x;
5952{
5953  rtx inner;
5954  rtx pos;			/* Always counts from low bit.  */
5955  int len;
5956  rtx mask;
5957  enum machine_mode compute_mode;
5958
5959  /* Loop until we find something we can't simplify.  */
5960  while (1)
5961    {
5962      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5963	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5964	{
5965	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5966	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5967	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5968	}
5969      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5970	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5971	{
5972	  inner = XEXP (SET_DEST (x), 0);
5973	  len = INTVAL (XEXP (SET_DEST (x), 1));
5974	  pos = XEXP (SET_DEST (x), 2);
5975
	  /* If the position is constant and the field extends beyond
	     the width of INNER, surround INNER with a USE to indicate
	     this.  */
5978	  if (GET_CODE (pos) == CONST_INT
5979	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5980	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5981
5982	  if (BITS_BIG_ENDIAN)
5983	    {
5984	      if (GET_CODE (pos) == CONST_INT)
5985		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5986			       - INTVAL (pos));
5987	      else if (GET_CODE (pos) == MINUS
5988		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5989		       && (INTVAL (XEXP (pos, 1))
5990			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5991		/* If position is ADJUST - X, new position is X.  */
5992		pos = XEXP (pos, 0);
5993	      else
5994		pos = gen_binary (MINUS, GET_MODE (pos),
5995				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5996					   - len),
5997				  pos);
5998	    }
5999	}
6000
      /* A SUBREG between two modes that occupy the same number of words
	 can be done by moving the SUBREG to the source.  */
6003      else if (GET_CODE (SET_DEST (x)) == SUBREG
6004	       /* We need SUBREGs to compute nonzero_bits properly.  */
6005	       && nonzero_sign_valid
6006	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6007		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6008		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6009			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6010	{
6011	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6012			   gen_lowpart_for_combine
6013			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
6014			    SET_SRC (x)));
6015	  continue;
6016	}
6017      else
6018	break;
6019
6020      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6021	inner = SUBREG_REG (inner);
6022
6023      compute_mode = GET_MODE (inner);
6024
      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
6026      if (! SCALAR_INT_MODE_P (compute_mode))
6027	{
6028	  enum machine_mode imode;
6029
6030	  /* Don't do anything for vector or complex integral types.  */
6031	  if (! FLOAT_MODE_P (compute_mode))
6032	    break;
6033
6034	  /* Try to find an integral mode to pun with.  */
6035	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6036	  if (imode == BLKmode)
6037	    break;
6038
6039	  compute_mode = imode;
6040	  inner = gen_lowpart_for_combine (imode, inner);
6041	}
6042
6043      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
6044      if (len < HOST_BITS_PER_WIDE_INT)
6045	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6046      else
6047	break;
6048
6049      /* Now compute the equivalent expression.  Make a copy of INNER
6050	 for the SET_DEST in case it is a MEM into which we will substitute;
6051	 we don't want shared RTL in that case.  */
6052      x = gen_rtx_SET
6053	(VOIDmode, copy_rtx (inner),
6054	 gen_binary (IOR, compute_mode,
6055		     gen_binary (AND, compute_mode,
6056				 simplify_gen_unary (NOT, compute_mode,
6057						     gen_binary (ASHIFT,
6058								 compute_mode,
6059								 mask, pos),
6060						     compute_mode),
6061				 inner),
6062		     gen_binary (ASHIFT, compute_mode,
6063				 gen_binary (AND, compute_mode,
6064					     gen_lowpart_for_combine
6065					     (compute_mode, SET_SRC (x)),
6066					     mask),
6067				 pos)));
6068    }
6069
6070  return x;
6071}
6072
6073/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
6074   it is an RTX that represents a variable starting position; otherwise,
6075   POS is the (constant) starting bit position (counted from the LSB).
6076
6077   INNER may be a USE.  This will occur when we started with a bitfield
6078   that went outside the boundary of the object in memory, which is
6079   allowed on most machines.  To isolate this case, we produce a USE
6080   whose mode is wide enough and surround the MEM with it.  The only
6081   code that understands the USE is this routine.  If it is not removed,
6082   it will cause the resulting insn not to match.
6083
6084   UNSIGNEDP is nonzero for an unsigned reference and zero for a
6085   signed reference.
6086
6087   IN_DEST is nonzero if this is a reference in the destination of a
6088   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
6089   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6090   be used.
6091
6092   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
6093   ZERO_EXTRACT should be built even for bits starting at bit 0.
6094
6095   MODE is the desired mode of the result (if IN_DEST == 0).
6096
6097   The result is an RTX for the extraction or NULL_RTX if the target
6098   can't handle it.  */
6099
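/* For example (illustrative), on a 32-bit target,
       make_extraction (SImode, (reg:SI R), 8, NULL_RTX, 8, 1, 0, 0)
   describes the field (R >> 8) & 0xff, conceptually
       (zero_extract:SI (reg:SI R) (const_int 8) (const_int 8)).  */
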
6100static rtx
6101make_extraction (mode, inner, pos, pos_rtx, len,
6102		 unsignedp, in_dest, in_compare)
6103     enum machine_mode mode;
6104     rtx inner;
6105     HOST_WIDE_INT pos;
6106     rtx pos_rtx;
6107     unsigned HOST_WIDE_INT len;
6108     int unsignedp;
6109     int in_dest, in_compare;
6110{
6111  /* This mode describes the size of the storage area
6112     to fetch the overall value from.  Within that, we
6113     ignore the POS lowest bits, etc.  */
6114  enum machine_mode is_mode = GET_MODE (inner);
6115  enum machine_mode inner_mode;
6116  enum machine_mode wanted_inner_mode = byte_mode;
6117  enum machine_mode wanted_inner_reg_mode = word_mode;
6118  enum machine_mode pos_mode = word_mode;
6119  enum machine_mode extraction_mode = word_mode;
6120  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
6121  int spans_byte = 0;
6122  rtx new = 0;
6123  rtx orig_pos_rtx = pos_rtx;
6124  HOST_WIDE_INT orig_pos;
6125
6126  /* Get some information about INNER and get the innermost object.  */
6127  if (GET_CODE (inner) == USE)
6128    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
6129    /* We don't need to adjust the position because we set up the USE
6130       to pretend that it was a full-word object.  */
6131    spans_byte = 1, inner = XEXP (inner, 0);
6132  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6133    {
6134      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6135	 consider just the QI as the memory to extract from.
6136	 The subreg adds or removes high bits; its mode is
6137	 irrelevant to the meaning of this extraction,
6138	 since POS and LEN count from the lsb.  */
6139      if (GET_CODE (SUBREG_REG (inner)) == MEM)
6140	is_mode = GET_MODE (SUBREG_REG (inner));
6141      inner = SUBREG_REG (inner);
6142    }
6143  else if (GET_CODE (inner) == ASHIFT
6144	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
6145	   && pos_rtx == 0 && pos == 0
6146	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6147    {
6148      /* We're extracting the least significant bits of an rtx
6149	 (ashift X (const_int C)), where LEN > C.  Extract the
6150	 least significant (LEN - C) bits of X, giving an rtx
6151	 whose mode is MODE, then shift it left C times.  */
6152      new = make_extraction (mode, XEXP (inner, 0),
6153			     0, 0, len - INTVAL (XEXP (inner, 1)),
6154			     unsignedp, in_dest, in_compare);
6155      if (new != 0)
6156	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6157    }
6158
6159  inner_mode = GET_MODE (inner);
6160
6161  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6162    pos = INTVAL (pos_rtx), pos_rtx = 0;
6163
6164  /* See if this can be done without an extraction.  We never can if the
6165     width of the field is not the same as that of some integer mode. For
6166     registers, we can only avoid the extraction if the position is at the
6167     low-order bit and this is either not in the destination or we have the
6168     appropriate STRICT_LOW_PART operation available.
6169
6170     For MEM, we can avoid an extract if the field starts on an appropriate
6171     boundary and we can change the mode of the memory reference.  However,
6172     we cannot directly access the MEM if we have a USE and the underlying
6173     MEM is not TMODE.  This combination means that MEM was being used in a
6174     context where bits outside its mode were being referenced; that is only
6175     valid in bit-field insns.  */
6176
6177  if (tmode != BLKmode
6178      && ! (spans_byte && inner_mode != tmode)
6179      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6180	   && GET_CODE (inner) != MEM
6181	   && (! in_dest
6182	       || (GET_CODE (inner) == REG
6183		   && have_insn_for (STRICT_LOW_PART, tmode))))
6184	  || (GET_CODE (inner) == MEM && pos_rtx == 0
6185	      && (pos
6186		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6187		     : BITS_PER_UNIT)) == 0
6188	      /* We can't do this if we are widening INNER_MODE (it
6189		 may not be aligned, for one thing).  */
6190	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6191	      && (inner_mode == tmode
6192		  || (! mode_dependent_address_p (XEXP (inner, 0))
6193		      && ! MEM_VOLATILE_P (inner))))))
6194    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current modes are the same, we need not
	 adjust the offset.  Otherwise, we do so if bytes are big-endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6201
6202      if (GET_CODE (inner) == MEM)
6203	{
6204	  HOST_WIDE_INT offset;
6205
6206	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6207	  if (BYTES_BIG_ENDIAN)
6208	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6209	  else
6210	    offset = pos / BITS_PER_UNIT;
6211
6212	  new = adjust_address_nv (inner, tmode, offset);
6213	}
6214      else if (GET_CODE (inner) == REG)
6215	{
6216	  /* We can't call gen_lowpart_for_combine here since we always want
6217	     a SUBREG and it would sometimes return a new hard register.  */
6218	  if (tmode != inner_mode)
6219	    {
6220	      HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6221
6222	      if (WORDS_BIG_ENDIAN
6223		  && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6224		final_word = ((GET_MODE_SIZE (inner_mode)
6225			       - GET_MODE_SIZE (tmode))
6226			      / UNITS_PER_WORD) - final_word;
6227
6228	      final_word *= UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN
		  && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6231		final_word += (GET_MODE_SIZE (inner_mode)
6232			       - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6233
6234	      /* Avoid creating invalid subregs, for example when
6235		 simplifying (x>>32)&255.  */
6236	      if (final_word >= GET_MODE_SIZE (inner_mode))
6237		return NULL_RTX;
6238
6239	      new = gen_rtx_SUBREG (tmode, inner, final_word);
6240	    }
6241	  else
6242	    new = inner;
6243	}
6244      else
6245	new = force_to_mode (inner, tmode,
6246			     len >= HOST_BITS_PER_WIDE_INT
6247			     ? ~(unsigned HOST_WIDE_INT) 0
6248			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6249			     NULL_RTX, 0);
6250
6251      /* If this extraction is going into the destination of a SET,
6252	 make a STRICT_LOW_PART unless we made a MEM.  */
6253
6254      if (in_dest)
6255	return (GET_CODE (new) == MEM ? new
6256		: (GET_CODE (new) != SUBREG
6257		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6258		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6259
6260      if (mode == tmode)
6261	return new;
6262
6263      if (GET_CODE (new) == CONST_INT)
6264	return gen_int_mode (INTVAL (new), mode);
6265
      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 sign and zero extension, which are equivalent in these cases.  */
6269      if (flag_expensive_optimizations
6270	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6271	      && ((nonzero_bits (new, tmode)
6272		   & ~(((unsigned HOST_WIDE_INT)
6273			GET_MODE_MASK (tmode))
6274		       >> 1))
6275		  == 0)))
6276	{
6277	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6278	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6279
	  /* Prefer ZERO_EXTEND, since it gives more information to
	     backends.  */
6282	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6283	    return temp;
6284	  return temp1;
6285	}
6286
6287      /* Otherwise, sign- or zero-extend unless we already are in the
6288	 proper mode.  */
6289
6290      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6291			     mode, new));
6292    }
6293
6294  /* Unless this is a COMPARE or we have a funny memory reference,
6295     don't do anything with zero-extending field extracts starting at
6296     the low-order bit since they are simple AND operations.  */
6297  if (pos_rtx == 0 && pos == 0 && ! in_dest
6298      && ! in_compare && ! spans_byte && unsignedp)
6299    return 0;
6300
  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object when an original shift would have been undefined.  */
6306  if (! spans_byte && GET_CODE (inner) == MEM
6307      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6308	  || (pos_rtx != 0 && len != 1)))
6309    return 0;
6310
6311  /* Get the mode to use should INNER not be a MEM, the mode for the position,
6312     and the mode for the result.  */
6313  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6314    {
6315      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6316      pos_mode = mode_for_extraction (EP_insv, 2);
6317      extraction_mode = mode_for_extraction (EP_insv, 3);
6318    }
6319
6320  if (! in_dest && unsignedp
6321      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6322    {
6323      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6324      pos_mode = mode_for_extraction (EP_extzv, 3);
6325      extraction_mode = mode_for_extraction (EP_extzv, 0);
6326    }
6327
6328  if (! in_dest && ! unsignedp
6329      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6330    {
6331      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6332      pos_mode = mode_for_extraction (EP_extv, 3);
6333      extraction_mode = mode_for_extraction (EP_extv, 0);
6334    }
6335
6336  /* Never narrow an object, since that might not be safe.  */
6337
6338  if (mode != VOIDmode
6339      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6340    extraction_mode = mode;
6341
6342  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6343      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6344    pos_mode = GET_MODE (pos_rtx);
6345
6346  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6347     if we have to change the mode of memory and cannot, the desired mode is
6348     EXTRACTION_MODE.  */
6349  if (GET_CODE (inner) != MEM)
6350    wanted_inner_mode = wanted_inner_reg_mode;
6351  else if (inner_mode != wanted_inner_mode
6352	   && (mode_dependent_address_p (XEXP (inner, 0))
6353	       || MEM_VOLATILE_P (inner)))
6354    wanted_inner_mode = extraction_mode;
6355
6356  orig_pos = pos;
6357
6358  if (BITS_BIG_ENDIAN)
6359    {
6360      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6361	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6362	 position.  Otherwise, build subtraction.
6363	 Note that POS is relative to the mode of the original argument.
6364	 If it's a MEM we need to recompute POS relative to that.
6365	 However, if we're extracting from (or inserting into) a register,
6366	 we want to recompute POS relative to wanted_inner_mode.  */
6367      int width = (GET_CODE (inner) == MEM
6368		   ? GET_MODE_BITSIZE (is_mode)
6369		   : GET_MODE_BITSIZE (wanted_inner_mode));
6370
6371      if (pos_rtx == 0)
6372	pos = width - len - pos;
6373      else
6374	pos_rtx
6375	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6376      /* POS may be less than 0 now, but we check for that below.
6377	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
6378    }
6379
6380  /* If INNER has a wider mode, make it smaller.  If this is a constant
6381     extract, try to adjust the byte to point to the byte containing
6382     the value.  */
6383  if (wanted_inner_mode != VOIDmode
6384      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6385      && ((GET_CODE (inner) == MEM
6386	   && (inner_mode == wanted_inner_mode
6387	       || (! mode_dependent_address_p (XEXP (inner, 0))
6388		   && ! MEM_VOLATILE_P (inner))))))
6389    {
6390      int offset = 0;
6391
6392      /* The computations below will be correct if the machine is big
6393	 endian in both bits and bytes or little endian in bits and bytes.
6394	 If it is mixed, we must adjust.  */
6395
6396      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6397	 adjust OFFSET to compensate.  */
6398      if (BYTES_BIG_ENDIAN
6399	  && ! spans_byte
6400	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6401	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6402
6403      /* If this is a constant position, we can move to the desired byte.  */
6404      if (pos_rtx == 0)
6405	{
6406	  offset += pos / BITS_PER_UNIT;
6407	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6408	}
6409
6410      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6411	  && ! spans_byte
6412	  && is_mode != wanted_inner_mode)
6413	offset = (GET_MODE_SIZE (is_mode)
6414		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6415
6416      if (offset != 0 || inner_mode != wanted_inner_mode)
6417	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6418    }
6419
6420  /* If INNER is not memory, we can always get it into the proper mode.  If we
6421     are changing its mode, POS must be a constant and smaller than the size
6422     of the new mode.  */
6423  else if (GET_CODE (inner) != MEM)
6424    {
6425      if (GET_MODE (inner) != wanted_inner_mode
6426	  && (pos_rtx != 0
6427	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6428	return 0;
6429
6430      inner = force_to_mode (inner, wanted_inner_mode,
6431			     pos_rtx
6432			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6433			     ? ~(unsigned HOST_WIDE_INT) 0
6434			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6435				<< orig_pos),
6436			     NULL_RTX, 0);
6437    }
6438
6439  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6440     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6441  if (pos_rtx != 0
6442      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6443    {
6444      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6445
      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 SIGN_EXTEND and ZERO_EXTEND, which are equivalent in these
	 cases.  */
6450      if (flag_expensive_optimizations
6451	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6452	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6453		   & ~(((unsigned HOST_WIDE_INT)
6454			GET_MODE_MASK (GET_MODE (pos_rtx)))
6455		       >> 1))
6456		  == 0)))
6457	{
6458	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6459
	  /* Prefer ZERO_EXTEND, since it gives more information to
	     backends.  */
6462	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6463	    temp = temp1;
6464	}
6465      pos_rtx = temp;
6466    }
6467  else if (pos_rtx != 0
6468	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6469    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6470
6471  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6472     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6473     be a CONST_INT.  */
6474  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6475    pos_rtx = orig_pos_rtx;
6476
6477  else if (pos_rtx == 0)
6478    pos_rtx = GEN_INT (pos);
6479
  /* Make the required operation.  See if we can use an existing rtx.  */
6481  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6482			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6483  if (! in_dest)
6484    new = gen_lowpart_for_combine (mode, new);
6485
6486  return new;
6487}
6488
6489/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6490   with any other operations in X.  Return X without that shift if so.  */
6491
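/* For example (illustrative), with COUNT == 3,
       (plus:SI (ashift:SI X (const_int 3)) (const_int 8))
   yields (plus:SI X (const_int 1)); shifting that result left by 3
   reconstructs the original expression.  */
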
6492static rtx
6493extract_left_shift (x, count)
6494     rtx x;
6495     int count;
6496{
6497  enum rtx_code code = GET_CODE (x);
6498  enum machine_mode mode = GET_MODE (x);
6499  rtx tem;
6500
6501  switch (code)
6502    {
6503    case ASHIFT:
6504      /* This is the shift itself.  If it is wide enough, we will return
6505	 either the value being shifted if the shift count is equal to
6506	 COUNT or a shift for the difference.  */
6507      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6508	  && INTVAL (XEXP (x, 1)) >= count)
6509	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6510				     INTVAL (XEXP (x, 1)) - count);
6511      break;
6512
6513    case NEG:  case NOT:
6514      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6515	return simplify_gen_unary (code, mode, tem, mode);
6516
6517      break;
6518
6519    case PLUS:  case IOR:  case XOR:  case AND:
6520      /* If we can safely shift this constant and we find the inner shift,
6521	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
6524	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6525	return gen_binary (code, mode, tem,
6526			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6527
6528      break;
6529
6530    default:
6531      break;
6532    }
6533
6534  return 0;
6535}
6536
6537/* Look at the expression rooted at X.  Look for expressions
6538   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6539   Form these expressions.
6540
6541   Return the new rtx, usually just X.
6542
6543   Also, for machines like the VAX that don't have logical shift insns,
6544   try to convert logical to arithmetic shift operations in cases where
6545   they are equivalent.  This undoes the canonicalizations to logical
6546   shifts done elsewhere.
6547
6548   We try, as much as possible, to re-use rtl expressions to save memory.
6549
   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
6554
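/* For example (illustrative), inside a memory address,
       (plus:SI (ashift:SI X (const_int 2)) Y)
   is rewritten as (plus:SI (mult:SI X (const_int 4)) Y).  */
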
6555static rtx
6556make_compound_operation (x, in_code)
6557     rtx x;
6558     enum rtx_code in_code;
6559{
6560  enum rtx_code code = GET_CODE (x);
6561  enum machine_mode mode = GET_MODE (x);
6562  int mode_width = GET_MODE_BITSIZE (mode);
6563  rtx rhs, lhs;
6564  enum rtx_code next_code;
6565  int i;
6566  rtx new = 0;
6567  rtx tem;
6568  const char *fmt;
6569
6570  /* Select the code to be used in recursive calls.  Once we are inside an
6571     address, we stay there.  If we have a comparison, set to COMPARE,
6572     but once inside, go back to our default of SET.  */
6573
6574  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6575	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6576		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6577	       : in_code == COMPARE ? SET : in_code);
6578
6579  /* Process depending on the code of this operation.  If NEW is set
6580     nonzero, it will be returned.  */
6581
6582  switch (code)
6583    {
6584    case ASHIFT:
6585      /* Convert shifts by constants into multiplications if inside
6586	 an address.  */
6587      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6588	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6589	  && INTVAL (XEXP (x, 1)) >= 0)
6590	{
6591	  new = make_compound_operation (XEXP (x, 0), next_code);
6592	  new = gen_rtx_MULT (mode, new,
6593			      GEN_INT ((HOST_WIDE_INT) 1
6594				       << INTVAL (XEXP (x, 1))));
6595	}
6596      break;
6597
6598    case AND:
6599      /* If the second operand is not a constant, we can't do anything
6600	 with it.  */
6601      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6602	break;
6603
6604      /* If the constant is a power of two minus one and the first operand
6605	 is a logical right shift, make an extraction.  */
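      /* E.g. (and:SI (lshiftrt:SI X (const_int 8)) (const_int 255))
	 becomes an 8-bit ZERO_EXTRACT at bit 8 (illustrative).  */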
6606      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6607	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6608	{
6609	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6610	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6611				 0, in_code == COMPARE);
6612	}
6613
6614      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6615      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6616	       && subreg_lowpart_p (XEXP (x, 0))
6617	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6618	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6619	{
6620	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6621					 next_code);
6622	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6623				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6624				 0, in_code == COMPARE);
6625	}
6626      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6627      else if ((GET_CODE (XEXP (x, 0)) == XOR
6628		|| GET_CODE (XEXP (x, 0)) == IOR)
6629	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6630	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6631	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6632	{
6633	  /* Apply the distributive law, and then try to make extractions.  */
6634	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6635				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6636					     XEXP (x, 1)),
6637				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6638					     XEXP (x, 1)));
6639	  new = make_compound_operation (new, in_code);
6640	}
6641
      /* If we have (and (rotate X C) M) and C is at least the number
	 of bits in M, this is an extraction.  */
6644
6645      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6646	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6647	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6648	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6649	{
6650	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6651	  new = make_extraction (mode, new,
6652				 (GET_MODE_BITSIZE (mode)
6653				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6654				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6655	}
6656
6657      /* On machines without logical shifts, if the operand of the AND is
6658	 a logical shift and our mask turns off all the propagated sign
6659	 bits, we can replace the logical shift with an arithmetic shift.  */
6660      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6661	       && !have_insn_for (LSHIFTRT, mode)
6662	       && have_insn_for (ASHIFTRT, mode)
6663	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6664	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6665	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6666	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6667	{
6668	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6669
6670	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6671	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6672	    SUBST (XEXP (x, 0),
6673		   gen_rtx_ASHIFTRT (mode,
6674				     make_compound_operation
6675				     (XEXP (XEXP (x, 0), 0), next_code),
6676				     XEXP (XEXP (x, 0), 1)));
6677	}
6678
6679      /* If the constant is one less than a power of two, this might be
6680	 representable by an extraction even if no shift is present.
6681	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6682	 we are in a COMPARE.  */
6683      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6684	new = make_extraction (mode,
6685			       make_compound_operation (XEXP (x, 0),
6686							next_code),
6687			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6688
6689      /* If we are in a comparison and this is an AND with a power of two,
6690	 convert this into the appropriate bit extract.  */
6691      else if (in_code == COMPARE
6692	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6693	new = make_extraction (mode,
6694			       make_compound_operation (XEXP (x, 0),
6695							next_code),
6696			       i, NULL_RTX, 1, 1, 0, 1);
6697
6698      break;
6699
6700    case LSHIFTRT:
6701      /* If the sign bit is known to be zero, replace this with an
6702	 arithmetic shift.  */
6703      if (have_insn_for (ASHIFTRT, mode)
6704	  && ! have_insn_for (LSHIFTRT, mode)
6705	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode)
	      & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
6707	{
6708	  new = gen_rtx_ASHIFTRT (mode,
6709				  make_compound_operation (XEXP (x, 0),
6710							   next_code),
6711				  XEXP (x, 1));
6712	  break;
6713	}
6714
6715      /* ... fall through ...  */
6716
6717    case ASHIFTRT:
6718      lhs = XEXP (x, 0);
6719      rhs = XEXP (x, 1);
6720
6721      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6722	 this is a SIGN_EXTRACT.  */
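      /* E.g. in SImode, (ashiftrt (ashift FOO (const_int 24))
	 (const_int 24)) extracts the low 8 bits of FOO with sign
	 extension (illustrative).  */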
6723      if (GET_CODE (rhs) == CONST_INT
6724	  && GET_CODE (lhs) == ASHIFT
6725	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6726	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6727	{
6728	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6729	  new = make_extraction (mode, new,
6730				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6731				 NULL_RTX, mode_width - INTVAL (rhs),
6732				 code == LSHIFTRT, 0, in_code == COMPARE);
6733	  break;
6734	}
6735
6736      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6737	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6738	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6739	 seem worth the effort; the case checked for occurs on Alpha.  */
6740
6741      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6742	  && ! (GET_CODE (lhs) == SUBREG
6743		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6744	  && GET_CODE (rhs) == CONST_INT
6745	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6746	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6747	new = make_extraction (mode, make_compound_operation (new, next_code),
6748			       0, NULL_RTX, mode_width - INTVAL (rhs),
6749			       code == LSHIFTRT, 0, in_code == COMPARE);
6750
6751      break;
6752
6753    case SUBREG:
6754      /* Call ourselves recursively on the inner expression.  If we are
6755	 narrowing the object and it has a different RTL code from
6756	 what it originally did, do this SUBREG as a force_to_mode.  */
6757
6758      tem = make_compound_operation (SUBREG_REG (x), in_code);
6759      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6760	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6761	  && subreg_lowpart_p (x))
6762	{
6763	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6764				     NULL_RTX, 0);
6765
6766	  /* If we have something other than a SUBREG, we might have
6767	     done an expansion, so rerun ourselves.  */
6768	  if (GET_CODE (newer) != SUBREG)
6769	    newer = make_compound_operation (newer, in_code);
6770
6771	  return newer;
6772	}
6773
6774      /* If this is a paradoxical subreg, and the new code is a sign or
6775	 zero extension, omit the subreg and widen the extension.  If it
6776	 is a regular subreg, we can still get rid of the subreg by not
6777	 widening so much, or in fact removing the extension entirely.  */
6778      if ((GET_CODE (tem) == SIGN_EXTEND
6779	   || GET_CODE (tem) == ZERO_EXTEND)
6780	  && subreg_lowpart_p (x))
6781	{
6782	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6783	      || (GET_MODE_SIZE (mode) >
6784		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6785	    {
6786	      if (! SCALAR_INT_MODE_P (mode))
6787		break;
6788	      tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6789	    }
6790	  else
6791	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6792	  return tem;
6793	}
6794      break;
6795
6796    default:
6797      break;
6798    }
6799
6800  if (new)
6801    {
6802      x = gen_lowpart_for_combine (mode, new);
6803      code = GET_CODE (x);
6804    }
6805
6806  /* Now recursively process each operand of this operation.  */
6807  fmt = GET_RTX_FORMAT (code);
6808  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6809    if (fmt[i] == 'e')
6810      {
6811	new = make_compound_operation (XEXP (x, i), next_code);
6812	SUBST (XEXP (x, i), new);
6813      }
6814
6815  return x;
6816}
6817
/* Given M, see if it is a value that would select a field of bits
6819   within an item, but not the entire word.  Return -1 if not.
6820   Otherwise, return the starting position of the field, where 0 is the
6821   low-order bit.
6822
6823   *PLEN is set to the length of the field.  */
6824
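/* For example (illustrative), M == 0xf0 selects a 4-bit field starting
   at bit 4: the return value is 4 and *PLEN is set to 4.  */
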
6825static int
6826get_pos_from_mask (m, plen)
6827     unsigned HOST_WIDE_INT m;
6828     unsigned HOST_WIDE_INT *plen;
6829{
6830  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6831  int pos = exact_log2 (m & -m);
6832  int len;
6833
6834  if (pos < 0)
6835    return -1;
6836
6837  /* Now shift off the low-order zero bits and see if we have a power of
6838     two minus 1.  */
6839  len = exact_log2 ((m >> pos) + 1);
6840
6841  if (len <= 0)
6842    return -1;
6843
6844  *plen = len;
6845  return pos;
6846}
6847
6848/* See if X can be simplified knowing that we will only refer to it in
6849   MODE and will only refer to those bits that are nonzero in MASK.
6850   If other bits are being computed or if masking operations are done
6851   that select a superset of the bits in MASK, they can sometimes be
6852   ignored.
6853
6854   Return a possibly simplified expression, but always convert X to
6855   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6856
6857   Also, if REG is nonzero and X is a register equal in value to REG,
6858   replace X with REG.
6859
6860   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6861   are all off in X.  This is used when X will be complemented, by either
6862   NOT, NEG, or XOR.  */
6863
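/* For example (illustrative), forcing (and:SI X (const_int 255)) to
   SImode under MASK == 15 folds the AND constant to 255 & 15 == 15;
   since that equals MASK, the AND is redundant and X itself is used.  */
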
6864static rtx
6865force_to_mode (x, mode, mask, reg, just_select)
6866     rtx x;
6867     enum machine_mode mode;
6868     unsigned HOST_WIDE_INT mask;
6869     rtx reg;
6870     int just_select;
6871{
6872  enum rtx_code code = GET_CODE (x);
6873  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6874  enum machine_mode op_mode;
6875  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6876  rtx op0, op1, temp;
6877
6878  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6879     code below will do the wrong thing since the mode of such an
6880     expression is VOIDmode.
6881
6882     Also do nothing if X is a CLOBBER; this can happen if X was
6883     the return value from a call to gen_lowpart_for_combine.  */
6884  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6885    return x;
6886
  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
6890  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6891	      && have_insn_for (code, mode))
6892	     ? mode : GET_MODE (x));
6893
6894  /* It is not valid to do a right-shift in a narrower mode
6895     than the one it came in with.  */
6896  if ((code == LSHIFTRT || code == ASHIFTRT)
6897      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6898    op_mode = GET_MODE (x);
6899
6900  /* Truncate MASK to fit OP_MODE.  */
6901  if (op_mode)
6902    mask &= GET_MODE_MASK (op_mode);
6903
  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
6907  if (op_mode)
6908    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6909		   ? GET_MODE_MASK (op_mode)
6910		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6911		      - 1));
6912  else
6913    fuller_mask = ~(HOST_WIDE_INT) 0;
6914
6915  /* Determine what bits of X are guaranteed to be (non)zero.  */
6916  nonzero = nonzero_bits (x, mode);
6917
6918  /* If none of the bits in X are needed, return a zero.  */
6919  if (! just_select && (nonzero & mask) == 0)
6920    x = const0_rtx;
6921
6922  /* If X is a CONST_INT, return a new one.  Do this here since the
6923     test below will fail.  */
6924  if (GET_CODE (x) == CONST_INT)
6925    {
6926      if (SCALAR_INT_MODE_P (mode))
6927        return gen_int_mode (INTVAL (x) & mask, mode);
6928      else
6929	{
6930	  x = GEN_INT (INTVAL (x) & mask);
6931	  return gen_lowpart_common (mode, x);
6932	}
6933    }
6934
6935  /* If X is narrower than MODE and we want all the bits in X's mode, just
6936     get X in the proper mode.  */
6937  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6938      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6939    return gen_lowpart_for_combine (mode, x);
6940
6941  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6942     MASK are already known to be zero in X, we need not do anything.  */
6943  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6944    return x;
6945
6946  switch (code)
6947    {
6948    case CLOBBER:
6949      /* If X is a (clobber (const_int)), return it since we know we are
6950	 generating something that won't match.  */
6951      return x;
6952
6953    case USE:
6954      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6955	 spanned the boundary of the MEM.  If we are now masking so it is
6956	 within that boundary, we don't need the USE any more.  */
6957      if (! BITS_BIG_ENDIAN
6958	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6959	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6960      break;
6961
6962    case SIGN_EXTEND:
6963    case ZERO_EXTEND:
6964    case ZERO_EXTRACT:
6965    case SIGN_EXTRACT:
6966      x = expand_compound_operation (x);
6967      if (GET_CODE (x) != code)
6968	return force_to_mode (x, mode, mask, reg, next_select);
6969      break;
6970
6971    case REG:
6972      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6973		       || rtx_equal_p (reg, get_last_value (x))))
6974	x = reg;
6975      break;
6976
6977    case SUBREG:
6978      if (subreg_lowpart_p (x)
6979	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6980	     if the constant masks to zero all the bits the mode doesn't
6981	     have.  */
6982	  && ((GET_MODE_SIZE (GET_MODE (x))
6983	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6984	      || (0 == (mask
6985			& GET_MODE_MASK (GET_MODE (x))
6986			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6987	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6988      break;
6989
6990    case AND:
6991      /* If this is an AND with a constant, convert it into an AND
6992	 whose constant is the AND of that constant with MASK.  If it
6993	 remains an AND of MASK, delete it since it is redundant.  */
6994
6995      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6996	{
6997	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6998				      mask & INTVAL (XEXP (x, 1)));
6999
	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and the mask equals
	     MASK, we don't need it.  */
7003
7004	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7005	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7006		  == mask))
7007	    x = XEXP (x, 0);
7008
7009	  /* If it remains an AND, try making another AND with the bits
7010	     in the mode mask that aren't in MASK turned on.  If the
7011	     constant in the AND is wide enough, this might make a
7012	     cheaper constant.  */
7013
7014	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7015	      && GET_MODE_MASK (GET_MODE (x)) != mask
7016	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7017	    {
7018	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7019				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7020	      int width = GET_MODE_BITSIZE (GET_MODE (x));
7021	      rtx y;
7022
	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
7025	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7026		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7027		cval |= (HOST_WIDE_INT) -1 << width;
7028
7029	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
7030	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
7031		x = y;
7032	    }
7033
7034	  break;
7035	}
7036
7037      goto binop;
7038
7039    case PLUS:
7040      /* In (and (plus FOO C1) M), if M is a mask that just turns off
7041	 low-order bits (as in an alignment operation) and FOO is already
7042	 aligned to that boundary, mask C1 to that boundary as well.
7043	 This may eliminate that PLUS and, later, the AND.  */
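
      /* E.g. if FOO is known to be a multiple of 8 and M == ~7, then
	 C1 == 9 is masked down to 8, so (plus FOO 9) becomes
	 (plus FOO 8) (illustrative).  */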
7044
7045      {
7046	unsigned int width = GET_MODE_BITSIZE (mode);
7047	unsigned HOST_WIDE_INT smask = mask;
7048
7049	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7050	   number, sign extend it.  */
7051
7052	if (width < HOST_BITS_PER_WIDE_INT
7053	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7054	  smask |= (HOST_WIDE_INT) -1 << width;
7055
7056	if (GET_CODE (XEXP (x, 1)) == CONST_INT
7057	    && exact_log2 (- smask) >= 0
7058	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7059	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7060	  return force_to_mode (plus_constant (XEXP (x, 0),
7061					       (INTVAL (XEXP (x, 1)) & smask)),
7062				mode, smask, reg, next_select);
7063      }
7064
7065      /* ... fall through ...  */
7066
7067    case MULT:
7068      /* For PLUS, MINUS and MULT, we need any bits less significant than the
7069	 most significant bit in MASK since carries from those bits will
7070	 affect the bits we are interested in.  */
7071      mask = fuller_mask;
7072      goto binop;
7073
7074    case MINUS:
      /* If X is (minus C Y) where C's least significant set bit is larger
	 than any bit in the mask, then we may replace X with (neg Y).  */
7077      if (GET_CODE (XEXP (x, 0)) == CONST_INT
7078	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7079					& -INTVAL (XEXP (x, 0))))
7080	      > mask))
7081	{
7082	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7083				  GET_MODE (x));
7084	  return force_to_mode (x, mode, mask, reg, next_select);
7085	}
7086
      /* Similarly, if C contains every bit in the fuller_mask, then we may
	 replace X with (not Y).  */
7089      if (GET_CODE (XEXP (x, 0)) == CONST_INT
7090	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7091	      == INTVAL (XEXP (x, 0))))
7092	{
7093	  x = simplify_gen_unary (NOT, GET_MODE (x),
7094				  XEXP (x, 1), GET_MODE (x));
7095	  return force_to_mode (x, mode, mask, reg, next_select);
7096	}
7097
7098      mask = fuller_mask;
7099      goto binop;
7100
7101    case IOR:
7102    case XOR:
7103      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7104	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7105	 operation which may be a bitfield extraction.  Ensure that the
7106	 constant we form is not wider than the mode of X.  */
7107
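      /* E.g. (ior:SI (lshiftrt:SI FOO (const_int 8)) (const_int 15))
	 commutes to (lshiftrt:SI (ior:SI FOO (const_int 3840))
	 (const_int 8)); 3840 == 15 << 8 (illustrative).  */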
7108      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7109	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7110	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7111	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7112	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7113	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
7114	       + floor_log2 (INTVAL (XEXP (x, 1))))
7115	      < GET_MODE_BITSIZE (GET_MODE (x)))
7116	  && (INTVAL (XEXP (x, 1))
7117	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7118	{
7119	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7120			  << INTVAL (XEXP (XEXP (x, 0), 1)));
7121	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
7122			     XEXP (XEXP (x, 0), 0), temp);
7123	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
7124			  XEXP (XEXP (x, 0), 1));
7125	  return force_to_mode (x, mode, mask, reg, next_select);
7126	}
7127
7128    binop:
7129      /* For most binary operations, just propagate into the operation and
7130	 change the mode if we have an operation of that mode.  */
7131
7132      op0 = gen_lowpart_for_combine (op_mode,
7133				     force_to_mode (XEXP (x, 0), mode, mask,
7134						    reg, next_select));
7135      op1 = gen_lowpart_for_combine (op_mode,
7136				     force_to_mode (XEXP (x, 1), mode, mask,
7137						    reg, next_select));
7138
7139      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7140	x = gen_binary (code, op_mode, op0, op1);
7141      break;
7142
7143    case ASHIFT:
7144      /* For left shifts, do the same, but just for the first operand.
7145	 However, we cannot do anything with shifts where we cannot
7146	 guarantee that the counts are smaller than the size of the mode
7147	 because such a count will have a different meaning in a
7148	 wider mode.  */
7149
7150      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7151	     && INTVAL (XEXP (x, 1)) >= 0
7152	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7153	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7154		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7155		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7156	break;
7157
7158      /* If the shift count is a constant and we can do arithmetic in
7159	 the mode of the shift, refine which bits we need.  Otherwise, use the
7160	 conservative form of the mask.  */
7161      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7162	  && INTVAL (XEXP (x, 1)) >= 0
7163	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7164	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7165	mask >>= INTVAL (XEXP (x, 1));
7166      else
7167	mask = fuller_mask;
7168
7169      op0 = gen_lowpart_for_combine (op_mode,
7170				     force_to_mode (XEXP (x, 0), op_mode,
7171						    mask, reg, next_select));
7172
7173      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7174	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7175      break;
7176
7177    case LSHIFTRT:
7178      /* Here we can only do something if the shift count is a constant,
7179	 this shift constant is valid for the host, and we can do arithmetic
7180	 in OP_MODE.  */
7181
7182      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7183	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7184	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7185	{
7186	  rtx inner = XEXP (x, 0);
7187	  unsigned HOST_WIDE_INT inner_mask;
7188
7189	  /* Select the mask of the bits we need for the shift operand.  */
7190	  inner_mask = mask << INTVAL (XEXP (x, 1));
7191
7192	  /* We can only change the mode of the shift if we can do arithmetic
7193	     in the mode of the shift and INNER_MASK is no wider than the
7194	     width of OP_MODE.  */
7195	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7196	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7197	    op_mode = GET_MODE (x);
7198
7199	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7200
7201	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7202	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7203	}
7204
7205      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7206	 shift and AND produces only copies of the sign bit (C2 is one less
7207	 than a power of two), we can do this with just a shift.  */
7208
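      /* E.g. in SImode, if the top four bits of FOO are all copies of
	 its sign bit, (and (lshiftrt FOO (const_int 28)) (const_int 7))
	 is just (lshiftrt FOO (const_int 29)) (illustrative).  */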
7209      if (GET_CODE (x) == LSHIFTRT
7210	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7211	  /* The shift puts one of the sign bit copies in the least significant
7212	     bit.  */
7213	  && ((INTVAL (XEXP (x, 1))
7214	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7215	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7216	  && exact_log2 (mask + 1) >= 0
7217	  /* Number of bits left after the shift must be more than the mask
7218	     needs.  */
7219	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7220	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7221	  /* Must be more sign bit copies than the mask needs.  */
7222	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7223	      >= exact_log2 (mask + 1)))
7224	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7225			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7226				 - exact_log2 (mask + 1)));
7227
7228      goto shiftrt;
7229
7230    case ASHIFTRT:
7231      /* If we are just looking for the sign bit, we don't need this shift at
7232	 all, even if it has a variable count.  */
7233      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7234	  && (mask == ((unsigned HOST_WIDE_INT) 1
7235		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7236	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7237
7238      /* If this is a shift by a constant, get a mask that contains those bits
7239	 that are not copies of the sign bit.  We then have two cases:  If
7240	 MASK only includes those bits, this can be a logical shift, which may
7241	 allow simplifications.  If MASK is a single-bit field not within
7242	 those bits, we are requesting a copy of the sign bit and hence can
7243	 shift the sign bit to the appropriate location.  */
7244
7245      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7246	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7247	{
7248	  int i = -1;
7249
7250	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7251	     represent a mask for all its bits in a single scalar.
7252	     But we only care about the lower bits, so calculate these.  */
7253
7254	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7255	    {
7256	      nonzero = ~(HOST_WIDE_INT) 0;
7257
7258	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7259		 is the number of bits a full-width mask would have set.
7260		 We need only shift if these are fewer than nonzero can
7261		 hold.  If not, we must keep all bits set in nonzero.  */
7262
7263	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7264		  < HOST_BITS_PER_WIDE_INT)
7265		nonzero >>= INTVAL (XEXP (x, 1))
7266			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x));
7268	    }
7269	  else
7270	    {
7271	      nonzero = GET_MODE_MASK (GET_MODE (x));
7272	      nonzero >>= INTVAL (XEXP (x, 1));
7273	    }
7274
7275	  if ((mask & ~nonzero) == 0
7276	      || (i = exact_log2 (mask)) >= 0)
7277	    {
7278	      x = simplify_shift_const
7279		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7280		 i < 0 ? INTVAL (XEXP (x, 1))
7281		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7282
7283	      if (GET_CODE (x) != ASHIFTRT)
7284		return force_to_mode (x, mode, mask, reg, next_select);
7285	    }
7286	}
7287
7288      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7289	 even if the shift count isn't a constant.  */
7290      if (mask == 1)
7291	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7292
7293    shiftrt:
7294
7295      /* If this is a zero- or sign-extension operation that just affects bits
7296	 we don't care about, remove it.  Be sure the call above returned
7297	 something that is still a shift.  */
7298
7299      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7300	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7301	  && INTVAL (XEXP (x, 1)) >= 0
7302	  && (INTVAL (XEXP (x, 1))
7303	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7304	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7305	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7306	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7307	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7308			      reg, next_select);
7309
7310      break;
7311
7312    case ROTATE:
7313    case ROTATERT:
7314      /* If the shift count is constant and we can do computations
7315	 in the mode of X, compute where the bits we care about are.
7316	 Otherwise, we can't do anything.  Don't change the mode of
7317	 the shift or propagate MODE into the shift, though.  */
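      /* E.g. for (rotate:SI FOO (const_int 8)) with MASK == 0xff00,
	 rotating MASK right by 8 shows that only the low byte of FOO
	 matters, so FOO is forced to mask 0xff (illustrative).  */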
7318      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7319	  && INTVAL (XEXP (x, 1)) >= 0)
7320	{
7321	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7322					    GET_MODE (x), GEN_INT (mask),
7323					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
7325	    SUBST (XEXP (x, 0),
7326		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7327				  INTVAL (temp), reg, next_select));
7328	}
7329      break;
7330
7331    case NEG:
7332      /* If we just want the low-order bit, the NEG isn't needed since it
7333	 won't change the low-order bit.  */
7334      if (mask == 1)
7335	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7336
7337      /* We need any bits less significant than the most significant bit in
7338	 MASK since carries from those bits will affect the bits we are
7339	 interested in.  */
7340      mask = fuller_mask;
7341      goto unop;
7342
7343    case NOT:
7344      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7345	 same as the XOR case above.  Ensure that the constant we form is not
7346	 wider than the mode of X.  */
7347
7348      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7349	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7350	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7351	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7352	      < GET_MODE_BITSIZE (GET_MODE (x)))
7353	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7354	{
7355	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7356			       GET_MODE (x));
7357	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7358	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7359
7360	  return force_to_mode (x, mode, mask, reg, next_select);
7361	}
7362
7363      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7364	 use the full mask inside the NOT.  */
7365      mask = fuller_mask;
7366
7367    unop:
7368      op0 = gen_lowpart_for_combine (op_mode,
7369				     force_to_mode (XEXP (x, 0), mode, mask,
7370						    reg, next_select));
7371      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7372	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7373      break;
7374
7375    case NE:
7376      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7377	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7378	 which is equal to STORE_FLAG_VALUE.  */
7379      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7380	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7381	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7382	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7383
7384      break;
7385
7386    case IF_THEN_ELSE:
7387      /* We have no way of knowing if the IF_THEN_ELSE can itself be
7388	 written in a narrower mode.  We play it safe and do not do so.  */
7389
7390      SUBST (XEXP (x, 1),
7391	     gen_lowpart_for_combine (GET_MODE (x),
7392				      force_to_mode (XEXP (x, 1), mode,
7393						     mask, reg, next_select)));
7394      SUBST (XEXP (x, 2),
7395	     gen_lowpart_for_combine (GET_MODE (x),
7396				      force_to_mode (XEXP (x, 2), mode,
7397						     mask, reg, next_select)));
7398      break;
7399
7400    default:
7401      break;
7402    }
7403
7404  /* Ensure we return a value of the proper mode.  */
7405  return gen_lowpart_for_combine (mode, x);
7406}
7407
7408/* Return nonzero if X is an expression that has one of two values depending on
7409   whether some other value is zero or nonzero.  In that case, we return the
7410   value that is being tested, *PTRUE is set to the value X has when the rtx
7411   being returned is nonzero, and *PFALSE is set to the other alternative.
7412
7413   If we return zero, we set *PTRUE and *PFALSE to X.  */
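/* For example, given (eq A (const_int 0)), we return A with *PTRUE set
   to const0_rtx and *PFALSE set to const_true_rtx.  */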
7414
7415static rtx
7416if_then_else_cond (x, ptrue, pfalse)
7417     rtx x;
7418     rtx *ptrue, *pfalse;
7419{
7420  enum machine_mode mode = GET_MODE (x);
7421  enum rtx_code code = GET_CODE (x);
7422  rtx cond0, cond1, true0, true1, false0, false1;
7423  unsigned HOST_WIDE_INT nz;
7424
7425  /* If we are comparing a value against zero, we are done.  */
7426  if ((code == NE || code == EQ)
7427      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7428    {
7429      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7430      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7431      return XEXP (x, 0);
7432    }
7433
7434  /* If this is a unary operation whose operand has one of two values, apply
7435     our opcode to compute those values.  */
7436  else if (GET_RTX_CLASS (code) == '1'
7437	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7438    {
7439      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7440      *pfalse = simplify_gen_unary (code, mode, false0,
7441				    GET_MODE (XEXP (x, 0)));
7442      return cond0;
7443    }
7444
7445  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7446     make can't possibly match and would suppress other optimizations.  */
7447  else if (code == COMPARE)
7448    ;
7449
7450  /* If this is a binary operation, see if either side has only one of two
7451     values.  If either one does or if both do and they are conditional on
7452     the same value, compute the new true and false values.  */
7453  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7454	   || GET_RTX_CLASS (code) == '<')
7455    {
7456      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7457      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7458
7459      if ((cond0 != 0 || cond1 != 0)
7460	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7461	{
7462	  /* If if_then_else_cond returned zero, then true/false are the
7463	     same rtl.  We must copy one of them to prevent invalid rtl
7464	     sharing.  */
7465	  if (cond0 == 0)
7466	    true0 = copy_rtx (true0);
7467	  else if (cond1 == 0)
7468	    true1 = copy_rtx (true1);
7469
7470	  *ptrue = gen_binary (code, mode, true0, true1);
7471	  *pfalse = gen_binary (code, mode, false0, false1);
7472	  return cond0 ? cond0 : cond1;
7473	}
7474
7475      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7476	 operands is zero when the other is nonzero, and vice-versa,
7477	 and STORE_FLAG_VALUE is 1 or -1.  */
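      /* For example, with STORE_FLAG_VALUE == 1,
	 (plus (mult (lt A B) X) (mult (ge A B) Y)) is X when A < B and Y
	 otherwise, since exactly one of the two comparisons is 1.  */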
7478
7479      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7480	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7481	      || code == UMAX)
7482	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7483	{
7484	  rtx op0 = XEXP (XEXP (x, 0), 1);
7485	  rtx op1 = XEXP (XEXP (x, 1), 1);
7486
7487	  cond0 = XEXP (XEXP (x, 0), 0);
7488	  cond1 = XEXP (XEXP (x, 1), 0);
7489
7490	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7491	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7492	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7493		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7494		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7495		  || ((swap_condition (GET_CODE (cond0))
7496		       == combine_reversed_comparison_code (cond1))
7497		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7498		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7499	      && ! side_effects_p (x))
7500	    {
7501	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7502	      *pfalse = gen_binary (MULT, mode,
7503				    (code == MINUS
7504				     ? simplify_gen_unary (NEG, mode, op1,
7505							   mode)
7506				     : op1),
7507				    const_true_rtx);
7508	      return cond0;
7509	    }
7510	}
7511
7512      /* Similarly for MULT, AND and UMIN, except that for these the result
7513	 is always zero.  */
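      /* For example, (and (mult (lt A B) X) (mult (ge A B) Y)) is
	 always zero: whichever way the comparison goes, one MULT is zero
	 and so is the AND.  */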
7514      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7515	  && (code == MULT || code == AND || code == UMIN)
7516	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7517	{
7518	  cond0 = XEXP (XEXP (x, 0), 0);
7519	  cond1 = XEXP (XEXP (x, 1), 0);
7520
7521	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7522	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7523	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7524		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7525		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7526		  || ((swap_condition (GET_CODE (cond0))
7527		       == combine_reversed_comparison_code (cond1))
7528		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7529		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7530	      && ! side_effects_p (x))
7531	    {
7532	      *ptrue = *pfalse = const0_rtx;
7533	      return cond0;
7534	    }
7535	}
7536    }
7537
7538  else if (code == IF_THEN_ELSE)
7539    {
7540      /* If we have IF_THEN_ELSE already, extract the condition and
7541	 canonicalize it if it is NE or EQ.  */
7542      cond0 = XEXP (x, 0);
7543      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7544      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7545	return XEXP (cond0, 0);
7546      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7547	{
7548	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7549	  return XEXP (cond0, 0);
7550	}
7551      else
7552	return cond0;
7553    }
7554
7555  /* If X is a SUBREG, we can narrow both the true and false values
7556     of the inner expression, if there is a condition.  */
7557  else if (code == SUBREG
7558	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7559					       &true0, &false0)))
7560    {
7561      *ptrue = simplify_gen_subreg (mode, true0,
7562				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7563      *pfalse = simplify_gen_subreg (mode, false0,
7564				     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7565
7566      return cond0;
7567    }
7568
7569  /* If X is a constant, this isn't special and will cause confusion
7570     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7571  else if (CONSTANT_P (x)
7572	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7573    ;
7574
7575  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7576     will be least confusing to the rest of the compiler.  */
7577  else if (mode == BImode)
7578    {
7579      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7580      return x;
7581    }
7582
7583  /* If X is known to be either 0 or -1, those are the true and
7584     false values when testing X.  */
7585  else if (x == constm1_rtx || x == const0_rtx
7586	   || (mode != VOIDmode
7587	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7588    {
7589      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7590      return x;
7591    }
7592
7593  /* Likewise for 0 or a single bit.  */
7594  else if (mode != VOIDmode
7595	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7596	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7597    {
7598      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7599      return x;
7600    }
7601
7602  /* Otherwise fail; show no condition with true and false values the same.  */
7603  *ptrue = *pfalse = x;
7604  return 0;
7605}
7606
7607/* Return the value of expression X given the fact that condition COND
7608   is known to be true when applied to REG as its first operand and VAL
7609   as its second.  X is known to not be shared and so can be modified in
7610   place.
7611
7612   We only handle the simplest cases, and specifically those cases that
7613   arise with IF_THEN_ELSE expressions.  */
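/* For example, if COND is GE, REG is R and VAL is (const_int 0), then
   (smax R (const_int 0)) is known to be R itself.  */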
7614
7615static rtx
7616known_cond (x, cond, reg, val)
7617     rtx x;
7618     enum rtx_code cond;
7619     rtx reg, val;
7620{
7621  enum rtx_code code = GET_CODE (x);
7622  rtx temp;
7623  const char *fmt;
7624  int i, j;
7625
7626  if (side_effects_p (x))
7627    return x;
7628
7629  /* If either operand of the condition is a floating point value,
7630     then we have to avoid collapsing an EQ comparison.  */
7631  if (cond == EQ
7632      && rtx_equal_p (x, reg)
7633      && ! FLOAT_MODE_P (GET_MODE (x))
7634      && ! FLOAT_MODE_P (GET_MODE (val)))
7635    return val;
7636
7637  if (cond == UNEQ && rtx_equal_p (x, reg))
7638    return val;
7639
7640  /* If X is (abs REG) and we know something about REG's relationship
7641     with zero, we may be able to simplify this.  */
7642
7643  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7644    switch (cond)
7645      {
7646      case GE:  case GT:  case EQ:
7647	return XEXP (x, 0);
7648      case LT:  case LE:
7649	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7650				   XEXP (x, 0),
7651				   GET_MODE (XEXP (x, 0)));
7652      default:
7653	break;
7654      }
7655
7656  /* The only other cases we handle are MIN, MAX, and comparisons if the
7657     operands are the same as REG and VAL.  */
7658
7659  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7660    {
7661      if (rtx_equal_p (XEXP (x, 0), val))
7662	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7663
7664      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7665	{
7666	  if (GET_RTX_CLASS (code) == '<')
7667	    {
7668	      if (comparison_dominates_p (cond, code))
7669		return const_true_rtx;
7670
7671	      code = combine_reversed_comparison_code (x);
7672	      if (code != UNKNOWN
7673		  && comparison_dominates_p (cond, code))
7674		return const0_rtx;
7675	      else
7676		return x;
7677	    }
7678	  else if (code == SMAX || code == SMIN
7679		   || code == UMIN || code == UMAX)
7680	    {
7681	      int unsignedp = (code == UMIN || code == UMAX);
7682
7683	      /* Do not reverse the condition when it is NE or EQ.
7684		 This is because we cannot conclude anything about
7685		 the value of 'SMAX (x, y)' when x is not equal to y,
7686		 but we can when x equals y.  */
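	      /* For example, knowing A > B, (smax A B) must be A: GT is
		 reversed to LE, and the LE/LT arm below then picks the
		 first operand.  */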
7687	      if ((code == SMAX || code == UMAX)
7688		  && ! (cond == EQ || cond == NE))
7689		cond = reverse_condition (cond);
7690
7691	      switch (cond)
7692		{
7693		case GE:   case GT:
7694		  return unsignedp ? x : XEXP (x, 1);
7695		case LE:   case LT:
7696		  return unsignedp ? x : XEXP (x, 0);
7697		case GEU:  case GTU:
7698		  return unsignedp ? XEXP (x, 1) : x;
7699		case LEU:  case LTU:
7700		  return unsignedp ? XEXP (x, 0) : x;
7701		default:
7702		  break;
7703		}
7704	    }
7705	}
7706    }
7707  else if (code == SUBREG)
7708    {
7709      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7710      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7711
7712      if (SUBREG_REG (x) != r)
7713	{
7714	  /* We must simplify subreg here, before we lose track of the
7715	     original inner_mode.  */
7716	  new = simplify_subreg (GET_MODE (x), r,
7717				 inner_mode, SUBREG_BYTE (x));
7718	  if (new)
7719	    return new;
7720	  else
7721	    SUBST (SUBREG_REG (x), r);
7722	}
7723
7724      return x;
7725    }
7726  /* We don't have to handle SIGN_EXTEND here, because even in the
7727     case of replacing something with a modeless CONST_INT, a
7728     CONST_INT is already (supposed to be) a valid sign extension for
7729     its narrower mode, which implies it's already properly
7730     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7731     story is different.  */
7732  else if (code == ZERO_EXTEND)
7733    {
7734      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7735      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7736
7737      if (XEXP (x, 0) != r)
7738	{
7739	  /* We must simplify the zero_extend here, before we lose
7740             track of the original inner_mode.  */
7741	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7742					  r, inner_mode);
7743	  if (new)
7744	    return new;
7745	  else
7746	    SUBST (XEXP (x, 0), r);
7747	}
7748
7749      return x;
7750    }
7751
7752  fmt = GET_RTX_FORMAT (code);
7753  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7754    {
7755      if (fmt[i] == 'e')
7756	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7757      else if (fmt[i] == 'E')
7758	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7759	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7760						cond, reg, val));
7761    }
7762
7763  return x;
7764}
7765
7766/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7767   assignment as a field assignment.  */
7768
7769static int
7770rtx_equal_for_field_assignment_p (x, y)
7771     rtx x;
7772     rtx y;
7773{
7774  if (x == y || rtx_equal_p (x, y))
7775    return 1;
7776
7777  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7778    return 0;
7779
7780  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7781     Note that all SUBREGs of MEM are paradoxical; otherwise they
7782     would have been rewritten.  */
7783  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7784      && GET_CODE (SUBREG_REG (y)) == MEM
7785      && rtx_equal_p (SUBREG_REG (y),
7786		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7787    return 1;
7788
7789  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7790      && GET_CODE (SUBREG_REG (x)) == MEM
7791      && rtx_equal_p (SUBREG_REG (x),
7792		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7793    return 1;
7794
7795  /* We used to see if get_last_value of X and Y were the same but that's
7796     not correct.  In one direction, we'll cause the assignment to have
7797     the wrong destination; in the other, we'll import a register into this
7798     insn that might already have been dead.  So fail if none of the
7799     above cases are true.  */
7800  return 0;
7801}
7802
7803/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7804   Return that assignment if so.
7805
7806   We only handle the most common cases.  */
7807
7808static rtx
7809make_field_assignment (x)
7810     rtx x;
7811{
7812  rtx dest = SET_DEST (x);
7813  rtx src = SET_SRC (x);
7814  rtx assign;
7815  rtx rhs, lhs;
7816  HOST_WIDE_INT c1;
7817  HOST_WIDE_INT pos;
7818  unsigned HOST_WIDE_INT len;
7819  rtx other;
7820  enum machine_mode mode;
7821
7822  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7823     a clear of a one-bit field.  We will have changed it to
7824     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7825     for a SUBREG.  */
7826
7827  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7828      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7829      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7830      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7831    {
7832      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7833				1, 1, 1, 0);
7834      if (assign != 0)
7835	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7836      return x;
7837    }
7838
7839  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7840	   && subreg_lowpart_p (XEXP (src, 0))
7841	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7842	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7843	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7844	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7845	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7846	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7847    {
7848      assign = make_extraction (VOIDmode, dest, 0,
7849				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7850				1, 1, 1, 0);
7851      if (assign != 0)
7852	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7853      return x;
7854    }
7855
7856  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7857     one-bit field.  */
7858  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7859	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7860	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7861    {
7862      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7863				1, 1, 1, 0);
7864      if (assign != 0)
7865	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7866      return x;
7867    }
7868
7869  /* The other case we handle is assignments into a constant-position
7870     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7871     a mask that has all one bits except for a group of zero bits and
7872     OTHER is known to have zeros where C1 has ones, this is such an
7873     assignment.  Compute the position and length from C1.  Shift OTHER
7874     to the appropriate position, force it to the required mode, and
7875     make the extraction.  Check for the AND in both operands.  */
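  /* For example, (set R (ior (and R (const_int -241)) (const_int 32)))
     clears bits 4 through 7 of R (-241 is ~0xf0) and then sets bit 5,
     so it can be rewritten as an assignment of (const_int 2) to the
     four-bit field at position 4 of R.  */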
7876
7877  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7878    return x;
7879
7880  rhs = expand_compound_operation (XEXP (src, 0));
7881  lhs = expand_compound_operation (XEXP (src, 1));
7882
7883  if (GET_CODE (rhs) == AND
7884      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7885      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7886    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7887  else if (GET_CODE (lhs) == AND
7888	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7889	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7890    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7891  else
7892    return x;
7893
7894  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7895  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7896      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7897      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7898    return x;
7899
7900  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7901  if (assign == 0)
7902    return x;
7903
7904  /* The mode to use for the source is the mode of the assignment, or of
7905     what is inside a possible STRICT_LOW_PART.  */
7906  mode = (GET_CODE (assign) == STRICT_LOW_PART
7907	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7908
7909  /* Shift OTHER right POS places and make it the source, restricting it
7910     to the proper length and mode.  */
7911
7912  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7913					     GET_MODE (src), other, pos),
7914		       mode,
7915		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7916		       ? ~(unsigned HOST_WIDE_INT) 0
7917		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7918		       dest, 0);
7919
7920  return gen_rtx_SET (VOIDmode, assign, src);
7921}
7922
7923/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7924   if so.  */
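/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C),
   and (plus (mult A C) (mult B C)) becomes (mult (plus A B) C).  */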
7925
7926static rtx
7927apply_distributive_law (x)
7928     rtx x;
7929{
7930  enum rtx_code code = GET_CODE (x);
7931  rtx lhs, rhs, other;
7932  rtx tem;
7933  enum rtx_code inner_code;
7934
7935  /* Distributivity is not true for floating point.
7936     It can change the value.  So don't do it.
7937     -- rms and moshier@world.std.com.  */
7938  if (FLOAT_MODE_P (GET_MODE (x)))
7939    return x;
7940
7941  /* The outer operation can only be one of the following:  */
7942  if (code != IOR && code != AND && code != XOR
7943      && code != PLUS && code != MINUS)
7944    return x;
7945
7946  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7947
7948  /* If either operand is a primitive we can't do anything, so get out
7949     fast.  */
7950  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7951      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7952    return x;
7953
7954  lhs = expand_compound_operation (lhs);
7955  rhs = expand_compound_operation (rhs);
7956  inner_code = GET_CODE (lhs);
7957  if (inner_code != GET_CODE (rhs))
7958    return x;
7959
7960  /* See if the inner and outer operations distribute.  */
7961  switch (inner_code)
7962    {
7963    case LSHIFTRT:
7964    case ASHIFTRT:
7965    case AND:
7966    case IOR:
7967      /* These all distribute except over PLUS.  */
7968      if (code == PLUS || code == MINUS)
7969	return x;
7970      break;
7971
7972    case MULT:
7973      if (code != PLUS && code != MINUS)
7974	return x;
7975      break;
7976
7977    case ASHIFT:
7978      /* This is also a multiply, so it distributes over everything.  */
7979      break;
7980
7981    case SUBREG:
7982      /* Non-paradoxical SUBREGs distribute over all operations, provided
7983	 the inner modes and byte offsets are the same, this is an extraction
7984	 of a low-order part, we don't convert an fp operation to int or
7985	 vice versa, and we would not be converting a single-word
7986	 operation into a multi-word operation.  The latter test is not
7987	 required, but it prevents generating unneeded multi-word operations.
7988	 Some of the previous tests are redundant given the latter test, but
7989	 are retained because they are required for correctness.
7990
7991	 We produce the result slightly differently in this case.  */
7992
7993      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7994	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7995	  || ! subreg_lowpart_p (lhs)
7996	  || (GET_MODE_CLASS (GET_MODE (lhs))
7997	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7998	  || (GET_MODE_SIZE (GET_MODE (lhs))
7999	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8000	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
8001	return x;
8002
8003      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8004			SUBREG_REG (lhs), SUBREG_REG (rhs));
8005      return gen_lowpart_for_combine (GET_MODE (x), tem);
8006
8007    default:
8008      return x;
8009    }
8010
8011  /* Set LHS and RHS to the inner operands (A and B in the example
8012     above) and set OTHER to the common operand (C in the example).
8013     There is only one way to do this unless the inner operation is
8014     commutative.  */
8015  if (GET_RTX_CLASS (inner_code) == 'c'
8016      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8017    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8018  else if (GET_RTX_CLASS (inner_code) == 'c'
8019	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8020    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8021  else if (GET_RTX_CLASS (inner_code) == 'c'
8022	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8023    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8024  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8025    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8026  else
8027    return x;
8028
8029  /* Form the new inner operation, seeing if it simplifies first.  */
8030  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
8031
8032  /* There is one exception to the general way of distributing:
8033     (a | b) ^ (a | c) == (~a) & (b ^ c)  */
8034  if (code == XOR && inner_code == IOR)
8035    {
8036      inner_code = AND;
8037      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8038    }
8039
8040  /* We may be able to continue distributing the result, so call
8041     ourselves recursively on the inner operation before forming the
8042     outer operation, which we return.  */
8043  return gen_binary (inner_code, GET_MODE (x),
8044		     apply_distributive_law (tem), other);
8045}
8046
8047/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8048   in MODE.
8049
8050   Return an equivalent form, if different from X.  Otherwise, return X.  If
8051   X is zero, we are to always construct the equivalent form.  */
8052
8053static rtx
8054simplify_and_const_int (x, mode, varop, constop)
8055     rtx x;
8056     enum machine_mode mode;
8057     rtx varop;
8058     unsigned HOST_WIDE_INT constop;
8059{
8060  unsigned HOST_WIDE_INT nonzero;
8061  int i;
8062
8063  /* Simplify VAROP knowing that we will be only looking at some of the
8064     bits in it.
8065
8066     Note by passing in CONSTOP, we guarantee that the bits not set in
8067     CONSTOP are not significant and will never be examined.  We must
8068     ensure that is the case by explicitly masking out those bits
8069     before returning.  */
8070  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
8071
8072  /* If VAROP is a CLOBBER, we will fail so return it.  */
8073  if (GET_CODE (varop) == CLOBBER)
8074    return varop;
8075
8076  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8077     to VAROP and return the new constant.  */
8078  if (GET_CODE (varop) == CONST_INT)
8079    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
8080
8081  /* See what bits may be nonzero in VAROP.  Unlike the general case of
8082     a call to nonzero_bits, here we don't care about bits outside
8083     MODE.  */
8084
8085  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8086
8087  /* Turn off all bits in the constant that are known to already be zero.
8088     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8089     which is tested below.  */
8090
8091  constop &= nonzero;
8092
8093  /* If we don't have any bits left, return zero.  */
8094  if (constop == 0)
8095    return const0_rtx;
8096
8097  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8098     a power of two, we can replace this with an ASHIFT.  */
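  /* For example, if X is known to be 0 or 1, then (and (neg X)
     (const_int 8)) is 0 or 8 respectively, which is exactly
     (ashift X 3).  */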
8099  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8100      && (i = exact_log2 (constop)) >= 0)
8101    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8102
8103  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8104     or XOR, then try to apply the distributive law.  This may eliminate
8105     operations if either branch can be simplified because of the AND.
8106     It may also make some cases more complex, but those cases probably
8107     won't match a pattern either with or without this.  */
8108
8109  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8110    return
8111      gen_lowpart_for_combine
8112	(mode,
8113	 apply_distributive_law
8114	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
8115		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
8116					      XEXP (varop, 0), constop),
8117		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
8118					      XEXP (varop, 1), constop))));
8119
8120  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8121     the AND and see if one of the operands simplifies to zero.  If so, we
8122     may eliminate it.  */
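  /* For example, ((X + 256) & 255) collapses to (and X 255): masking
     by a run of low-order ones is reduction mod 256, which distributes
     over the addition, and 256 reduces to zero.  */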
8123
8124  if (GET_CODE (varop) == PLUS
8125      && exact_log2 (constop + 1) >= 0)
8126    {
8127      rtx o0, o1;
8128
8129      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8130      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8131      if (o0 == const0_rtx)
8132	return o1;
8133      if (o1 == const0_rtx)
8134	return o0;
8135    }
8136
8137  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
8138     if we already had one (just check for the simplest cases).  */
8139  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8140      && GET_MODE (XEXP (x, 0)) == mode
8141      && SUBREG_REG (XEXP (x, 0)) == varop)
8142    varop = XEXP (x, 0);
8143  else
8144    varop = gen_lowpart_for_combine (mode, varop);
8145
8146  /* If we can't make the SUBREG, try to return what we were given.  */
8147  if (GET_CODE (varop) == CLOBBER)
8148    return x ? x : varop;
8149
8150  /* If we are only masking insignificant bits, return VAROP.  */
8151  if (constop == nonzero)
8152    x = varop;
8153  else
8154    {
8155      /* Otherwise, return an AND.  */
8156      constop = trunc_int_for_mode (constop, mode);
8157      /* See how much, if any, of X we can use.  */
8158      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8159	x = gen_binary (AND, mode, varop, GEN_INT (constop));
8160
8161      else
8162	{
8163	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
8164	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8165	    SUBST (XEXP (x, 1), GEN_INT (constop));
8166
8167	  SUBST (XEXP (x, 0), varop);
8168	}
8169    }
8170
8171  return x;
8172}
8173
8174#define nonzero_bits_with_known(X, MODE) \
8175  cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret)
8176
8177/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
8178   It avoids exponential behavior in nonzero_bits1 when X has
8179   identical subexpressions on the first or the second level.  */
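/* For example, if X is (plus Y Y) with both operands the same rtx,
   the value for Y is computed once and passed to nonzero_bits1 as
   KNOWN_X/KNOWN_RET rather than being recomputed for each operand.  */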
8180
8181static unsigned HOST_WIDE_INT
8182cached_nonzero_bits (x, mode, known_x, known_mode, known_ret)
8183     rtx x;
8184     enum machine_mode mode;
8185     rtx known_x;
8186     enum machine_mode known_mode;
8187     unsigned HOST_WIDE_INT known_ret;
8188{
8189  if (x == known_x && mode == known_mode)
8190    return known_ret;
8191
8192  /* Try to find identical subexpressions.  If found call
8193     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
8194     precomputed value for the subexpression as KNOWN_RET.  */
8195
8196  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8197      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8198    {
8199      rtx x0 = XEXP (x, 0);
8200      rtx x1 = XEXP (x, 1);
8201
8202      /* Check the first level.  */
8203      if (x0 == x1)
8204	return nonzero_bits1 (x, mode, x0, mode,
8205			      nonzero_bits_with_known (x0, mode));
8206
8207      /* Check the second level.  */
8208      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8209	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8210	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8211	return nonzero_bits1 (x, mode, x1, mode,
8212			      nonzero_bits_with_known (x1, mode));
8213
8214      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8215	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8216	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8217	return nonzero_bits1 (x, mode, x0, mode,
8218			      nonzero_bits_with_known (x0, mode));
8219    }
8220
8221  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
8222}
8223
8224/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
8225   We don't let nonzero_bits recur into num_sign_bit_copies, because that
8226   is less useful.  We can't allow both, because that results in exponential
8227   run time recursion.  There is a nullstone testcase that triggered
8228   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
8229#define cached_num_sign_bit_copies()
8230
8231/* Given an expression, X, compute which bits in X can be nonzero.
8232   We don't care about bits outside of those defined in MODE.
8233
8234   For most X this is simply GET_MODE_MASK (MODE), but if X is a shift,
8235   an AND, or a ZERO_EXTRACT, we can do better.  */
8236
8237static unsigned HOST_WIDE_INT
8238nonzero_bits1 (x, mode, known_x, known_mode, known_ret)
8239     rtx x;
8240     enum machine_mode mode;
8241     rtx known_x;
8242     enum machine_mode known_mode;
8243     unsigned HOST_WIDE_INT known_ret;
8244{
8245  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8246  unsigned HOST_WIDE_INT inner_nz;
8247  enum rtx_code code;
8248  unsigned int mode_width = GET_MODE_BITSIZE (mode);
8249  rtx tem;
8250
8251  /* For floating-point values, assume all bits are needed.  */
8252  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8253    return nonzero;
8254
8255  /* If X is wider than MODE, use its mode instead.  */
8256  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8257    {
8258      mode = GET_MODE (x);
8259      nonzero = GET_MODE_MASK (mode);
8260      mode_width = GET_MODE_BITSIZE (mode);
8261    }
8262
8263  if (mode_width > HOST_BITS_PER_WIDE_INT)
8264    /* Our only callers in this case look for single bit values.  So
8265       just return the mode mask.  Those tests will then be false.  */
8266    return nonzero;
8267
8268#ifndef WORD_REGISTER_OPERATIONS
8269  /* If MODE is wider than X, but both are a single word for both the host
8270     and target machines, we can compute this from which bits of the
8271     object might be nonzero in its own mode, taking into account the fact
8272     that on many CISC machines, accessing an object in a wider mode
8273     causes the high-order bits to become undefined.  So they are
8274     not known to be zero.  */
8275
8276  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8277      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8278      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8279      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8280    {
8281      nonzero &= nonzero_bits_with_known (x, GET_MODE (x));
8282      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8283      return nonzero;
8284    }
8285#endif
8286
8287  code = GET_CODE (x);
8288  switch (code)
8289    {
8290    case REG:
8291#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8292      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8293	 all the bits above ptr_mode are known to be zero.  */
8294      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8295	  && REG_POINTER (x))
8296	nonzero &= GET_MODE_MASK (ptr_mode);
8297#endif
8298
8299      /* Include declared information about alignment of pointers.  */
8300      /* ??? We don't properly preserve REG_POINTER changes across
8301	 pointer-to-integer casts, so we can't trust it except for
8302	 things that we know must be pointers.  See execute/960116-1.c.  */
8303      if ((x == stack_pointer_rtx
8304	   || x == frame_pointer_rtx
8305	   || x == arg_pointer_rtx)
8306	  && REGNO_POINTER_ALIGN (REGNO (x)))
8307	{
8308	  unsigned HOST_WIDE_INT alignment
8309	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8310
8311#ifdef PUSH_ROUNDING
8312	  /* If PUSH_ROUNDING is defined, it is possible for the
8313	     stack to be momentarily aligned only to that amount,
8314	     so we pick the least alignment.  */
8315	  if (x == stack_pointer_rtx && PUSH_ARGS)
8316	    alignment = MIN (PUSH_ROUNDING (1), alignment);
8317#endif
8318
8319	  nonzero &= ~(alignment - 1);
8320	}
8321
8322      /* If X is a register whose nonzero bits value is current, use it.
8323	 Otherwise, if X is a register whose value we can find, use that
8324	 value.  Otherwise, use the previously-computed global nonzero bits
8325	 for this register.  */
8326
8327      if (reg_last_set_value[REGNO (x)] != 0
8328	  && (reg_last_set_mode[REGNO (x)] == mode
8329	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8330		  && GET_MODE_CLASS (mode) == MODE_INT))
8331	  && (reg_last_set_label[REGNO (x)] == label_tick
8332	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8333		  && REG_N_SETS (REGNO (x)) == 1
8334		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8335					REGNO (x))))
8336	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8337	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8338
8339      tem = get_last_value (x);
8340
8341      if (tem)
8342	{
8343#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8344	  /* If X is narrower than MODE and TEM is a non-negative
8345	     constant that would appear negative in the mode of X,
8346	     sign-extend it for use in reg_nonzero_bits because some
8347	     machines (maybe most) will actually do the sign-extension
8348	     and this is the conservative approach.
8349
8350	     ??? For 2.5, try to tighten up the MD files in this regard
8351	     instead of this kludge.  */
8352
8353	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8354	      && GET_CODE (tem) == CONST_INT
8355	      && INTVAL (tem) > 0
8356	      && 0 != (INTVAL (tem)
8357		       & ((HOST_WIDE_INT) 1
8358			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8359	    tem = GEN_INT (INTVAL (tem)
8360			   | ((HOST_WIDE_INT) (-1)
8361			      << GET_MODE_BITSIZE (GET_MODE (x))));
8362#endif
8363	  return nonzero_bits_with_known (tem, mode) & nonzero;
8364	}
8365      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8366	{
8367	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8368
8369	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8370	    /* We don't know anything about the upper bits.  */
8371	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8372	  return nonzero & mask;
8373	}
8374      else
8375	return nonzero;
8376
8377    case CONST_INT:
8378#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8379      /* If X is negative in MODE, sign-extend the value.  */
8380      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8381	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8382	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8383#endif
8384
8385      return INTVAL (x);
8386
8387    case MEM:
8388#ifdef LOAD_EXTEND_OP
8389      /* In many, if not most, RISC machines, reading a byte from memory
8390	 zeros the rest of the register.  Noticing that fact saves a lot
8391	 of extra zero-extends.  */
8392      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8393	nonzero &= GET_MODE_MASK (GET_MODE (x));
8394#endif
8395      break;
8396
8397    case EQ:  case NE:
8398    case UNEQ:  case LTGT:
8399    case GT:  case GTU:  case UNGT:
8400    case LT:  case LTU:  case UNLT:
8401    case GE:  case GEU:  case UNGE:
8402    case LE:  case LEU:  case UNLE:
8403    case UNORDERED: case ORDERED:
8404
8405      /* If this produces an integer result, we know which bits are set.
8406	 Code here used to clear bits outside the mode of X, but that is
8407	 now done above.  */
8408
8409      if (GET_MODE_CLASS (mode) == MODE_INT
8410	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8411	nonzero = STORE_FLAG_VALUE;
8412      break;
8413
8414    case NEG:
8415#if 0
8416      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8417	 and num_sign_bit_copies.  */
8418      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8419	  == GET_MODE_BITSIZE (GET_MODE (x)))
8420	nonzero = 1;
8421#endif
8422
8423      if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8424	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8425      break;
8426
8427    case ABS:
8428#if 0
8429      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8430	 and num_sign_bit_copies.  */
8431      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8432	  == GET_MODE_BITSIZE (GET_MODE (x)))
8433	nonzero = 1;
8434#endif
8435      break;
8436
8437    case TRUNCATE:
8438      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8439		  & GET_MODE_MASK (mode));
8440      break;
8441
8442    case ZERO_EXTEND:
8443      nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode);
8444      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8445	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8446      break;
8447
8448    case SIGN_EXTEND:
8449      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8450	 Otherwise, show all the bits in the outer mode but not the inner
8451	 may be nonzero.  */
8452      inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode);
8453      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8454	{
8455	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8456	  if (inner_nz
8457	      & (((HOST_WIDE_INT) 1
8458		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8459	    inner_nz |= (GET_MODE_MASK (mode)
8460			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8461	}
8462
8463      nonzero &= inner_nz;
8464      break;
8465
8466    case AND:
8467      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8468		  & nonzero_bits_with_known (XEXP (x, 1), mode));
8469      break;
8470
8471    case XOR:   case IOR:
8472    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8473      {
8474	unsigned HOST_WIDE_INT nonzero0 =
8475	  nonzero_bits_with_known (XEXP (x, 0), mode);
8476
8477	/* Don't call nonzero_bits for the second time if it cannot change
8478	   anything.  */
8479	if ((nonzero & nonzero0) != nonzero)
8480	  nonzero &= (nonzero0
8481		      | nonzero_bits_with_known (XEXP (x, 1), mode));
8482      }
8483      break;
8484
8485    case PLUS:  case MINUS:
8486    case MULT:
8487    case DIV:   case UDIV:
8488    case MOD:   case UMOD:
8489      /* We can apply the rules of arithmetic to compute the number of
8490	 high- and low-order zero bits of these operations.  We start by
8491	 computing the width (position of the highest-order nonzero bit)
8492	 and the number of low-order zero bits for each value.  */
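      /* For example, if NZ0 is 0x0c (width 4, two low zeros) and NZ1
	 is 0x06 (width 3, one low zero), a PLUS has RESULT_WIDTH 5 and
	 RESULT_LOW 1, confining its nonzero bits to 0x1e.  */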
8493      {
8494	unsigned HOST_WIDE_INT nz0 =
8495	  nonzero_bits_with_known (XEXP (x, 0), mode);
8496	unsigned HOST_WIDE_INT nz1 =
8497	  nonzero_bits_with_known (XEXP (x, 1), mode);
8498	int width0 = floor_log2 (nz0) + 1;
8499	int width1 = floor_log2 (nz1) + 1;
8500	int low0 = floor_log2 (nz0 & -nz0);
8501	int low1 = floor_log2 (nz1 & -nz1);
8502	HOST_WIDE_INT op0_maybe_minusp
8503	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8504	HOST_WIDE_INT op1_maybe_minusp
8505	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8506	unsigned int result_width = mode_width;
8507	int result_low = 0;
8508
8509	switch (code)
8510	  {
8511	  case PLUS:
8512	    result_width = MAX (width0, width1) + 1;
8513	    result_low = MIN (low0, low1);
8514	    break;
8515	  case MINUS:
8516	    result_low = MIN (low0, low1);
8517	    break;
8518	  case MULT:
8519	    result_width = width0 + width1;
8520	    result_low = low0 + low1;
8521	    break;
8522	  case DIV:
8523	    if (width1 == 0)
8524	      break;
8525	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8526	      result_width = width0;
8527	    break;
8528	  case UDIV:
8529	    if (width1 == 0)
8530	      break;
8531	    result_width = width0;
8532	    break;
8533	  case MOD:
8534	    if (width1 == 0)
8535	      break;
8536	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8537	      result_width = MIN (width0, width1);
8538	    result_low = MIN (low0, low1);
8539	    break;
8540	  case UMOD:
8541	    if (width1 == 0)
8542	      break;
8543	    result_width = MIN (width0, width1);
8544	    result_low = MIN (low0, low1);
8545	    break;
8546	  default:
8547	    abort ();
8548	  }
8549
8550	if (result_width < mode_width)
8551	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8552
8553	if (result_low > 0)
8554	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8555
8556#ifdef POINTERS_EXTEND_UNSIGNED
8557	/* If pointers extend unsigned and this is an addition or subtraction
8558	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8559	   zero.  */
8560	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8561	    && (code == PLUS || code == MINUS)
8562	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8563	  nonzero &= GET_MODE_MASK (ptr_mode);
8564#endif
8565      }
8566      break;
8567
8568    case ZERO_EXTRACT:
8569      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8570	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8571	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8572      break;
8573
8574    case SUBREG:
8575      /* If this is a SUBREG formed for a promoted variable that has
8576	 been zero-extended, we know that at least the high-order bits
8577	 are zero, though others might be too.  */
8578
8579      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8580	nonzero = (GET_MODE_MASK (GET_MODE (x))
8581		   & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x)));
8582
8583      /* If the inner mode is a single word for both the host and target
8584	 machines, we can compute this from which bits of the inner
8585	 object might be nonzero.  */
8586      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8587	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8588	      <= HOST_BITS_PER_WIDE_INT))
8589	{
8590	  nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode);
8591
8592#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8593	  /* If this is a typical RISC machine, we only have to worry
8594	     about the way loads are extended.  */
8595	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8596	       ? (((nonzero
8597		    & (((unsigned HOST_WIDE_INT) 1
8598			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8599		   != 0))
8600	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8601	      || GET_CODE (SUBREG_REG (x)) != MEM)
8602#endif
8603	    {
8604	      /* On many CISC machines, accessing an object in a wider mode
8605		 causes the high-order bits to become undefined.  So they are
8606		 not known to be zero.  */
8607	      if (GET_MODE_SIZE (GET_MODE (x))
8608		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8609		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8610			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8611	    }
8612	}
8613      break;
8614
8615    case ASHIFTRT:
8616    case LSHIFTRT:
8617    case ASHIFT:
8618    case ROTATE:
8619      /* The nonzero bits are in two classes: any bits within MODE
8620	 that aren't in GET_MODE (x) are always significant.  The rest of the
8621	 nonzero bits are those that are significant in the operand of
8622	 the shift when shifted the appropriate number of bits.  This
8623	 shows that high-order bits are cleared by the right shift and
8624	 low-order bits by left shifts.  */
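      /* For example, for (lshiftrt X 4) the bits within the mode of X
	 are those of X shifted right by 4; the four bits shifted in at
	 the top are known to be zero.  */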
8625      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8626	  && INTVAL (XEXP (x, 1)) >= 0
8627	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8628	{
8629	  enum machine_mode inner_mode = GET_MODE (x);
8630	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8631	  int count = INTVAL (XEXP (x, 1));
8632	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8633	  unsigned HOST_WIDE_INT op_nonzero =
8634	    nonzero_bits_with_known (XEXP (x, 0), mode);
8635	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8636	  unsigned HOST_WIDE_INT outer = 0;
8637
8638	  if (mode_width > width)
8639	    outer = (op_nonzero & nonzero & ~mode_mask);
8640
8641	  if (code == LSHIFTRT)
8642	    inner >>= count;
8643	  else if (code == ASHIFTRT)
8644	    {
8645	      inner >>= count;
8646
8647	      /* If the sign bit may have been nonzero before the shift, we
8648		 need to mark all the places it could have been copied to
8649		 by the shift as possibly nonzero.  */
8650	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8651		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8652	    }
8653	  else if (code == ASHIFT)
8654	    inner <<= count;
8655	  else
8656	    inner = ((inner << (count % width)
8657		      | (inner >> (width - (count % width)))) & mode_mask);
8658
8659	  nonzero &= (outer | inner);
8660	}
8661      break;
8662
8663    case FFS:
8664      /* This is at most the number of bits in the mode.  */
8665      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8666      break;
8667
8668    case IF_THEN_ELSE:
8669      nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode)
8670		  | nonzero_bits_with_known (XEXP (x, 2), mode));
8671      break;
8672
8673    default:
8674      break;
8675    }
8676
8677  return nonzero;
8678}
8679
8680/* See the macro definition above.  */
8681#undef cached_num_sign_bit_copies
8682
8683#define num_sign_bit_copies_with_known(X, M) \
8684  cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret)
8685
8686/* The function cached_num_sign_bit_copies is a wrapper around
8687   num_sign_bit_copies1.  It avoids exponential behavior in
8688   num_sign_bit_copies1 when X has identical subexpressions on the
8689   first or the second level.  */
8690
8691static unsigned int
8692cached_num_sign_bit_copies (x, mode, known_x, known_mode, known_ret)
8693     rtx x;
8694     enum machine_mode mode;
8695     rtx known_x;
8696     enum machine_mode known_mode;
8697     unsigned int known_ret;
8698{
8699  if (x == known_x && mode == known_mode)
8700    return known_ret;
8701
8702  /* Try to find identical subexpressions.  If found call
8703     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
8704     the precomputed value for the subexpression as KNOWN_RET.  */
8705
8706  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8707      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8708    {
8709      rtx x0 = XEXP (x, 0);
8710      rtx x1 = XEXP (x, 1);
8711
8712      /* Check the first level.  */
8713      if (x0 == x1)
8714	return
8715	  num_sign_bit_copies1 (x, mode, x0, mode,
8716				num_sign_bit_copies_with_known (x0, mode));
8717
8718      /* Check the second level.  */
8719      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8720	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8721	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8722	return
8723	  num_sign_bit_copies1 (x, mode, x1, mode,
8724				num_sign_bit_copies_with_known (x1, mode));
8725
8726      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8727	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8728	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8729	return
8730	  num_sign_bit_copies1 (x, mode, x0, mode,
8731				num_sign_bit_copies_with_known (x0, mode));
8732    }
8733
8734  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
8735}
8736
8737/* Return the number of bits at the high-order end of X that are known to
8738   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8739   VOIDmode, X will be used in its own mode.  The returned value will always
8740   be between 1 and the number of bits in MODE.  */
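/* For example, (const_int -1) has 32 sign bit copies in a 32-bit mode,
   and a SIGN_EXTEND from a 32-bit mode used in a 64-bit one has at
   least 33 of them.  */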
8741
8742static unsigned int
8743num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret)
8744     rtx x;
8745     enum machine_mode mode;
8746     rtx known_x;
8747     enum machine_mode known_mode;
8748     unsigned int known_ret;
8749{
8750  enum rtx_code code = GET_CODE (x);
8751  unsigned int bitwidth;
8752  int num0, num1, result;
8753  unsigned HOST_WIDE_INT nonzero;
8754  rtx tem;
8755
8756  /* If we weren't given a mode, use the mode of X.  If the mode is still
8757     VOIDmode, we don't know anything.  Likewise if one of the modes is
8758     floating-point.  */
8759
8760  if (mode == VOIDmode)
8761    mode = GET_MODE (x);
8762
8763  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8764    return 1;
8765
8766  bitwidth = GET_MODE_BITSIZE (mode);
8767
8768  /* For a smaller object, just ignore the high bits.  */
8769  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8770    {
8771      num0 = num_sign_bit_copies_with_known (x, GET_MODE (x));
8772      return MAX (1,
8773		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8774    }
8775
8776  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8777    {
8778#ifndef WORD_REGISTER_OPERATIONS
8779      /* If this machine does not do all register operations on the entire
8780	 register and MODE is wider than the mode of X, we can say nothing
8781	 at all about the high-order bits.  */
8782      return 1;
8783#else
8784      /* Likewise on machines that do, if the mode of the object is smaller
8785	 than a word and loads of that size don't sign extend, we can say
8786	 nothing about the high order bits.  */
8787      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8788#ifdef LOAD_EXTEND_OP
8789	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8790#endif
8791	  )
8792	return 1;
8793#endif
8794    }
8795
8796  switch (code)
8797    {
8798    case REG:
8799
8800#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8801      /* If pointers extend signed and this is a pointer in Pmode, say that
8802	 all the bits above ptr_mode are known to be sign bit copies.  */
8803      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8804	  && REG_POINTER (x))
8805	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8806#endif
8807
8808      if (reg_last_set_value[REGNO (x)] != 0
8809	  && reg_last_set_mode[REGNO (x)] == mode
8810	  && (reg_last_set_label[REGNO (x)] == label_tick
8811	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8812		  && REG_N_SETS (REGNO (x)) == 1
8813		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8814					REGNO (x))))
8815	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8816	return reg_last_set_sign_bit_copies[REGNO (x)];
8817
8818      tem = get_last_value (x);
8819      if (tem != 0)
8820	return num_sign_bit_copies_with_known (tem, mode);
8821
8822      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8823	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8824	return reg_sign_bit_copies[REGNO (x)];
8825      break;
8826
8827    case MEM:
8828#ifdef LOAD_EXTEND_OP
8829      /* Some RISC machines sign-extend all loads of smaller than a word.  */
8830      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8831	return MAX (1, ((int) bitwidth
8832			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8833#endif
8834      break;
8835
8836    case CONST_INT:
8837      /* If the constant is negative, take its 1's complement and remask.
8838	 Then see how many zero bits we have.  */
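      /* For example, in a 32-bit mode, -4 is ...11111100: the masked
	 complement is 3, floor_log2 (3) is 1, and 32 - 1 - 1 gives 30
	 sign bit copies (bits 31 through 2).  */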
8839      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8840      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8841	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8842	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8843
8844      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8845
8846    case SUBREG:
8847      /* If this is a SUBREG for a promoted object that is sign-extended
8848	 and we are looking at it in a wider mode, we know that at least the
8849	 high-order bits are known to be sign bit copies.  */
8850
8851      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8852	{
8853	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8854	  return MAX ((int) bitwidth
8855		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8856		      num0);
8857	}
8858
8859      /* For a smaller object, just ignore the high bits.  */
8860      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8861	{
8862	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode);
8863	  return MAX (1, (num0
8864			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8865				   - bitwidth)));
8866	}
8867
8868#ifdef WORD_REGISTER_OPERATIONS
8869#ifdef LOAD_EXTEND_OP
8870      /* For paradoxical SUBREGs on machines where all register operations
8871	 affect the entire register, just look inside.  Note that we are
8872	 passing MODE to the recursive call, so the number of sign bit copies
8873	 will remain relative to that mode, not the inner mode.  */
8874
8875      /* This works only if loads sign extend.  Otherwise, if we get a
8876	 reload for the inner part, it may be loaded from the stack, and
8877	 then we lose all sign bit copies that existed before the store
8878	 to the stack.  */
8879
8880      if ((GET_MODE_SIZE (GET_MODE (x))
8881	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8882	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8883	  && GET_CODE (SUBREG_REG (x)) == MEM)
8884	return num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8885#endif
8886#endif
8887      break;
8888
8889    case SIGN_EXTRACT:
8890      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8891	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8892      break;
8893
8894    case SIGN_EXTEND:
8895      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8896	      + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode));
8897
8898    case TRUNCATE:
8899      /* For a smaller object, just ignore the high bits.  */
8900      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode);
8901      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8902				    - bitwidth)));
8903
8904    case NOT:
8905      return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8906
8907    case ROTATE:       case ROTATERT:
8908      /* If we are rotating left by a number of bits less than the number
8909	 of sign bit copies, we can just subtract that amount from the
8910	 number.  */
8911      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8912	  && INTVAL (XEXP (x, 1)) >= 0
8913	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8914	{
8915	  num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8916	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8917				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8918	}
8919      break;
8920
8921    case NEG:
8922      /* In general, this subtracts one sign bit copy.  But if the value
8923	 is known to be positive, the number of sign bit copies is the
8924	 same as that of the input.  Finally, if the input has just one bit
8925	 that might be nonzero, all the bits are copies of the sign bit.  */
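      /* For example, in an 8-bit mode, -64 (0b11000000) has two sign
	 bit copies, but its negation 64 (0b01000000) has only one.  */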
8926      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8927      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8928	return num0 > 1 ? num0 - 1 : 1;
8929
8930      nonzero = nonzero_bits (XEXP (x, 0), mode);
8931      if (nonzero == 1)
8932	return bitwidth;
8933
8934      if (num0 > 1
8935	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8936	num0--;
8937
8938      return num0;
8939
8940    case IOR:   case AND:   case XOR:
8941    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8942      /* Logical operations will preserve the number of sign-bit copies.
8943	 MIN and MAX operations always return one of the operands.  */
8944      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8945      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8946      return MIN (num0, num1);
8947
8948    case PLUS:  case MINUS:
8949      /* For addition and subtraction, we can have a 1-bit carry.  However,
8950	 if we are subtracting 1 from a positive number, there will not
8951	 be such a carry.  Furthermore, if the positive number is known to
8952	 be 0 or 1, we know the result is either -1 or 0.  */
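      /* For example, in an 8-bit mode, 63 (0b00111111) has two sign bit
	 copies, but 63 + 63 = 126 (0b01111110) has only one; the general
	 case below returns MIN (2, 2) - 1 = 1.  */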
8953
8954      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8955	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8956	{
8957	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8958	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8959	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8960		    : bitwidth - floor_log2 (nonzero) - 1);
8961	}
8962
8963      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8964      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8965      result = MAX (1, MIN (num0, num1) - 1);
8966
8967#ifdef POINTERS_EXTEND_UNSIGNED
8968      /* If pointers extend signed and this is an addition or subtraction
8969	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8970	 sign bit copies.  */
8971      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8972	  && (code == PLUS || code == MINUS)
8973	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8974	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8975			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8976		      result);
8977#endif
8978      return result;
8979
8980    case MULT:
8981      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
8983	 to be positive, we must allow for an additional bit since negating
8984	 a negative number can remove one sign bit copy.  */
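      /* For example, in an 8-bit mode, operands each with six sign bit
	 copies lie in [-4, 3]; their product lies in [-12, 16], and 16
	 (0b00010000) needs the extra bit, so 6 + 6 - 8 = 4 less one for
	 the possible negation leaves 3 sign bit copies.  */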
8985
8986      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8987      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8988
8989      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8990      if (result > 0
8991	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8992	      || (((nonzero_bits (XEXP (x, 0), mode)
8993		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8994		  && ((nonzero_bits (XEXP (x, 1), mode)
8995		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8996	result--;
8997
8998      return MAX (1, result);
8999
9000    case UDIV:
9001      /* The result must be <= the first operand.  If the first operand
9002         has the high bit set, we know nothing about the number of sign
9003         bit copies.  */
9004      if (bitwidth > HOST_BITS_PER_WIDE_INT)
9005	return 1;
9006      else if ((nonzero_bits (XEXP (x, 0), mode)
9007		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
9008	return 1;
9009      else
9010	return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9011
9012    case UMOD:
9013      /* The result must be <= the second operand.  */
9014      return num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9015
9016    case DIV:
      /* Similar to unsigned division, except that the divisor may be
	 negative; dividing by a negative value can negate the quotient
	 and lose one sign bit copy, so we subtract 1 in that case.  */
9020      result = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9021      if (result > 1
9022	  && (bitwidth > HOST_BITS_PER_WIDE_INT
9023	      || (nonzero_bits (XEXP (x, 1), mode)
9024		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
9025	result--;
9026
9027      return result;
9028
9029    case MOD:
9030      result = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9031      if (result > 1
9032	  && (bitwidth > HOST_BITS_PER_WIDE_INT
9033	      || (nonzero_bits (XEXP (x, 1), mode)
9034		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
9035	result--;
9036
9037      return result;
9038
9039    case ASHIFTRT:
      /* An arithmetic right shift by a constant adds the shift count
	 to the number of bits that are copies of the sign bit.  */
9042      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9043      if (GET_CODE (XEXP (x, 1)) == CONST_INT
9044	  && INTVAL (XEXP (x, 1)) > 0)
9045	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
9046
9047      return num0;
9048
9049    case ASHIFT:
9050      /* Left shifts destroy copies.  */
9051      if (GET_CODE (XEXP (x, 1)) != CONST_INT
9052	  || INTVAL (XEXP (x, 1)) < 0
9053	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
9054	return 1;
9055
9056      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9057      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
9058
9059    case IF_THEN_ELSE:
9060      num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9061      num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode);
9062      return MIN (num0, num1);
9063
9064    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
9065    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
9066    case GEU: case GTU: case LEU: case LTU:
9067    case UNORDERED: case ORDERED:
      /* The result of a comparison is STORE_FLAG_VALUE.  If it is
	 negative, take its 1's complement and remask.  Then see how
	 many zero bits we have.  */
9070      nonzero = STORE_FLAG_VALUE;
9071      if (bitwidth <= HOST_BITS_PER_WIDE_INT
9072	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
9073	nonzero = (~nonzero) & GET_MODE_MASK (mode);
9074
9075      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
9077
9078    default:
9079      break;
9080    }
9081
9082  /* If we haven't been able to figure it out by one of the above rules,
9083     see if some of the high-order bits are known to be zero.  If so,
9084     count those bits and return one less than that amount.  If we can't
9085     safely compute the mask for this mode, always return BITWIDTH.  */
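  /* For example, if X is known to fit in 16 unsigned bits in a 32-bit
     mode, NONZERO is at most 0xffff, floor_log2 gives 15, and we
     return 32 - 15 - 1 = 16 sign bit copies.  */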
9086
9087  if (bitwidth > HOST_BITS_PER_WIDE_INT)
9088    return 1;
9089
9090  nonzero = nonzero_bits (x, mode);
9091  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
9092	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
9093}
9094
9095/* Return the number of "extended" bits there are in X, when interpreted
9096   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
9097   unsigned quantities, this is the number of high-order zero bits.
9098   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
9100   bits.  For example, if two quantities for which this function returns
9101   at least 1 are added, the addition is known not to overflow.
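   For example, in an 8-bit mode the values 50 and 60 each have two
   sign bit copies, so this function returns 1 for both, and their sum
   110 is known not to overflow.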
9102
9103   This function will always return 0 unless called during combine, which
9104   implies that it must be called from a define_split.  */
9105
9106unsigned int
9107extended_count (x, mode, unsignedp)
9108     rtx x;
9109     enum machine_mode mode;
9110     int unsignedp;
9111{
9112  if (nonzero_sign_valid == 0)
9113    return 0;
9114
9115  return (unsignedp
9116	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9117	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9118			       - floor_log2 (nonzero_bits (x, mode)))
9119	     : 0)
9120	  : num_sign_bit_copies (x, mode) - 1);
9121}
9122
9123/* This function is called from `simplify_shift_const' to merge two
9124   outer operations.  Specifically, we have already found that we need
9125   to perform operation *POP0 with constant *PCONST0 at the outermost
9126   position.  We would now like to also perform OP1 with constant CONST1
9127   (with *POP0 being done last).
9128
9129   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
9130   the resulting operation.  *PCOMP_P is set to 1 if we would need to
9131   complement the innermost operand, otherwise it is unchanged.
9132
9133   MODE is the mode in which the operation will be done.  No bits outside
9134   the width of this mode matter.  It is assumed that the width of this mode
9135   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
9136
   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
9138   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
9139   result is simply *PCONST0.
9140
9141   If the resulting operation cannot be expressed as one operation, we
9142   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
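
/* For example, merging an outer IOR with constant 5 and an inner IOR
   with constant 3 yields a single IOR with constant 7; merging an
   inner XOR into an outer AND with the same constant keeps the AND and
   sets *PCOMP_P, since (a ^ b) & b == (~a) & b.  */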
9143
9144static int
9145merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
9146     enum rtx_code *pop0;
9147     HOST_WIDE_INT *pconst0;
9148     enum rtx_code op1;
9149     HOST_WIDE_INT const1;
9150     enum machine_mode mode;
9151     int *pcomp_p;
9152{
9153  enum rtx_code op0 = *pop0;
9154  HOST_WIDE_INT const0 = *pconst0;
9155
9156  const0 &= GET_MODE_MASK (mode);
9157  const1 &= GET_MODE_MASK (mode);
9158
9159  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9160  if (op0 == AND)
9161    const1 &= const0;
9162
9163  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
9164     if OP0 is SET.  */
9165
9166  if (op1 == NIL || op0 == SET)
9167    return 1;
9168
9169  else if (op0 == NIL)
9170    op0 = op1, const0 = const1;
9171
9172  else if (op0 == op1)
9173    {
9174      switch (op0)
9175	{
9176	case AND:
9177	  const0 &= const1;
9178	  break;
9179	case IOR:
9180	  const0 |= const1;
9181	  break;
9182	case XOR:
9183	  const0 ^= const1;
9184	  break;
9185	case PLUS:
9186	  const0 += const1;
9187	  break;
9188	case NEG:
9189	  op0 = NIL;
9190	  break;
9191	default:
9192	  break;
9193	}
9194    }
9195
9196  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9197  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9198    return 0;
9199
9200  /* If the two constants aren't the same, we can't do anything.  The
9201     remaining six cases can all be done.  */
9202  else if (const0 != const1)
9203    return 0;
9204
9205  else
9206    switch (op0)
9207      {
9208      case IOR:
9209	if (op1 == AND)
9210	  /* (a & b) | b == b */
9211	  op0 = SET;
9212	else /* op1 == XOR */
9213	  /* (a ^ b) | b == a | b */
9214	  {;}
9215	break;
9216
9217      case XOR:
9218	if (op1 == AND)
9219	  /* (a & b) ^ b == (~a) & b */
9220	  op0 = AND, *pcomp_p = 1;
9221	else /* op1 == IOR */
9222	  /* (a | b) ^ b == a & ~b */
9223	  op0 = AND, *pconst0 = ~const0;
9224	break;
9225
9226      case AND:
9227	if (op1 == IOR)
9228	  /* (a | b) & b == b */
	  op0 = SET;
9230	else /* op1 == XOR */
	  /* (a ^ b) & b == (~a) & b */
9232	  *pcomp_p = 1;
9233	break;
9234      default:
9235	break;
9236      }
9237
9238  /* Check for NO-OP cases.  */
9239  const0 &= GET_MODE_MASK (mode);
9240  if (const0 == 0
9241      && (op0 == IOR || op0 == XOR || op0 == PLUS))
9242    op0 = NIL;
9243  else if (const0 == 0 && op0 == AND)
9244    op0 = SET;
9245  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9246	   && op0 == AND)
9247    op0 = NIL;
9248
9249  /* ??? Slightly redundant with the above mask, but not entirely.
9250     Moving this above means we'd have to sign-extend the mode mask
9251     for the final test.  */
9252  const0 = trunc_int_for_mode (const0, mode);
9253
9254  *pop0 = op0;
9255  *pconst0 = const0;
9256
9257  return 1;
9258}
9259
9260/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9261   The result of the shift is RESULT_MODE.  X, if nonzero, is an expression
9262   that we started with.
9263
9264   The shift is normally computed in the widest mode we find in VAROP, as
9265   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
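
/* For example, (lshiftrt:SI (lshiftrt:SI X 2) 3) simplifies to
   (lshiftrt:SI X 5), and (lshiftrt:SI (ashift:SI X 3) 3) simplifies
   to (and:SI X (const_int 0x1fffffff)).  */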
9267
9268static rtx
9269simplify_shift_const (x, code, result_mode, varop, orig_count)
9270     rtx x;
9271     enum rtx_code code;
9272     enum machine_mode result_mode;
9273     rtx varop;
9274     int orig_count;
9275{
9276  enum rtx_code orig_code = code;
9277  unsigned int count;
9278  int signed_count;
9279  enum machine_mode mode = result_mode;
9280  enum machine_mode shift_mode, tmode;
9281  unsigned int mode_words
9282    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9283  /* We form (outer_op (code varop count) (outer_const)).  */
9284  enum rtx_code outer_op = NIL;
9285  HOST_WIDE_INT outer_const = 0;
9286  rtx const_rtx;
9287  int complement_p = 0;
9288  rtx new;
9289
  /* Make sure to truncate the "natural" shift on the way in.  We don't
9291     want to do this inside the loop as it makes it more difficult to
9292     combine shifts.  */
9293  if (SHIFT_COUNT_TRUNCATED)
9294    orig_count &= GET_MODE_BITSIZE (mode) - 1;
9295
9296  /* If we were given an invalid count, don't do anything except exactly
9297     what was requested.  */
9298
9299  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9300    {
9301      if (x)
9302	return x;
9303
9304      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9305    }
9306
9307  count = orig_count;
9308
  /* Unless one of the branches of the `switch' in this loop does a
     `continue', we will `break' the loop after the `switch'.  */
9311
9312  while (count != 0)
9313    {
9314      /* If we have an operand of (clobber (const_int 0)), just return that
9315	 value.  */
9316      if (GET_CODE (varop) == CLOBBER)
9317	return varop;
9318
9319      /* If we discovered we had to complement VAROP, leave.  Making a NOT
9320	 here would cause an infinite loop.  */
9321      if (complement_p)
9322	break;
9323
9324      /* Convert ROTATERT to ROTATE.  */
9325      if (code == ROTATERT)
9326	{
	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9328	  code = ROTATE;
9329	  if (VECTOR_MODE_P (result_mode))
9330	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9331	  else
9332	    count = bitsize - count;
9333	}
9334
9335      /* We need to determine what mode we will do the shift in.  If the
9336	 shift is a right shift or a ROTATE, we must always do it in the mode
9337	 it was originally done in.  Otherwise, we can do it in MODE, the
9338	 widest mode encountered.  */
9339      shift_mode
9340	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9341	   ? result_mode : mode);
9342
9343      /* Handle cases where the count is greater than the size of the mode
9344	 minus 1.  For ASHIFT, use the size minus one as the count (this can
9345	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9346	 take the count modulo the size.  For other shifts, the result is
9347	 zero.
9348
9349	 Since these shifts are being produced by the compiler by combining
	 multiple operations, each of which is defined, we know what the
9351	 result is supposed to be.  */
9352
9353      if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9354	{
9355	  if (code == ASHIFTRT)
9356	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9357	  else if (code == ROTATE || code == ROTATERT)
9358	    count %= GET_MODE_BITSIZE (shift_mode);
9359	  else
9360	    {
9361	      /* We can't simply return zero because there may be an
9362		 outer op.  */
9363	      varop = const0_rtx;
9364	      count = 0;
9365	      break;
9366	    }
9367	}
9368
9369      /* An arithmetic right shift of a quantity known to be -1 or 0
9370	 is a no-op.  */
9371      if (code == ASHIFTRT
9372	  && (num_sign_bit_copies (varop, shift_mode)
9373	      == GET_MODE_BITSIZE (shift_mode)))
9374	{
9375	  count = 0;
9376	  break;
9377	}
9378
9379      /* If we are doing an arithmetic right shift and discarding all but
9380	 the sign bit copies, this is equivalent to doing a shift by the
9381	 bitsize minus one.  Convert it into that shift because it will often
9382	 allow other simplifications.  */
9383
9384      if (code == ASHIFTRT
9385	  && (count + num_sign_bit_copies (varop, shift_mode)
9386	      >= GET_MODE_BITSIZE (shift_mode)))
9387	count = GET_MODE_BITSIZE (shift_mode) - 1;
9388
9389      /* We simplify the tests below and elsewhere by converting
9390	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9391	 `make_compound_operation' will convert it to an ASHIFTRT for
9392	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9393      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9394	  && code == ASHIFTRT
9395	  && ((nonzero_bits (varop, shift_mode)
9396	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9397	      == 0))
9398	code = LSHIFTRT;
9399
9400      switch (GET_CODE (varop))
9401	{
9402	case SIGN_EXTEND:
9403	case ZERO_EXTEND:
9404	case SIGN_EXTRACT:
9405	case ZERO_EXTRACT:
9406	  new = expand_compound_operation (varop);
9407	  if (new != varop)
9408	    {
9409	      varop = new;
9410	      continue;
9411	    }
9412	  break;
9413
9414	case MEM:
9415	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9416	     minus the width of a smaller mode, we can do this with a
9417	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
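	  /* For example, (lshiftrt:SI (mem:SI ...) 24) can become a
	     (zero_extend:SI (mem:QI ...)) of the most significant
	     byte.  */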
9418	  if ((code == ASHIFTRT || code == LSHIFTRT)
9419	      && ! mode_dependent_address_p (XEXP (varop, 0))
9420	      && ! MEM_VOLATILE_P (varop)
9421	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9422					 MODE_INT, 1)) != BLKmode)
9423	    {
9424	      new = adjust_address_nv (varop, tmode,
9425				       BYTES_BIG_ENDIAN ? 0
9426				       : count / BITS_PER_UNIT);
9427
9428	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9429				     : ZERO_EXTEND, mode, new);
9430	      count = 0;
9431	      continue;
9432	    }
9433	  break;
9434
9435	case USE:
	  /* Similar to the case above, except that we can only do this if
	     the resulting mode is the same as that of the underlying MEM,
	     and we must adjust the address depending on the *bit*
	     endianness because of the way that bit-field extract insns
	     are defined.  */
9440	  if ((code == ASHIFTRT || code == LSHIFTRT)
9441	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9442					 MODE_INT, 1)) != BLKmode
9443	      && tmode == GET_MODE (XEXP (varop, 0)))
9444	    {
9445	      if (BITS_BIG_ENDIAN)
9446		new = XEXP (varop, 0);
9447	      else
9448		{
9449		  new = copy_rtx (XEXP (varop, 0));
9450		  SUBST (XEXP (new, 0),
9451			 plus_constant (XEXP (new, 0),
9452					count / BITS_PER_UNIT));
9453		}
9454
9455	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9456				     : ZERO_EXTEND, mode, new);
9457	      count = 0;
9458	      continue;
9459	    }
9460	  break;
9461
9462	case SUBREG:
9463	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9464	     the same number of words as what we've seen so far.  Then store
9465	     the widest mode in MODE.  */
9466	  if (subreg_lowpart_p (varop)
9467	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9468		  > GET_MODE_SIZE (GET_MODE (varop)))
9469	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9470				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9471		 == mode_words)
9472	    {
9473	      varop = SUBREG_REG (varop);
9474	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9475		mode = GET_MODE (varop);
9476	      continue;
9477	    }
9478	  break;
9479
9480	case MULT:
9481	  /* Some machines use MULT instead of ASHIFT because MULT
9482	     is cheaper.  But it is still better on those machines to
9483	     merge two shifts into one.  */
9484	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9485	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9486	    {
9487	      varop
9488		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9489			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9490	      continue;
9491	    }
9492	  break;
9493
9494	case UDIV:
9495	  /* Similar, for when divides are cheaper.  */
9496	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9497	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9498	    {
9499	      varop
9500		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9501			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9502	      continue;
9503	    }
9504	  break;
9505
9506	case ASHIFTRT:
9507	  /* If we are extracting just the sign bit of an arithmetic
9508	     right shift, that shift is not needed.  However, the sign
9509	     bit of a wider mode may be different from what would be
9510	     interpreted as the sign bit in a narrower mode, so, if
9511	     the result is narrower, don't discard the shift.  */
9512	  if (code == LSHIFTRT
9513	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9514	      && (GET_MODE_BITSIZE (result_mode)
9515		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9516	    {
9517	      varop = XEXP (varop, 0);
9518	      continue;
9519	    }
9520
9521	  /* ... fall through ...  */
9522
9523	case LSHIFTRT:
9524	case ASHIFT:
9525	case ROTATE:
9526	  /* Here we have two nested shifts.  The result is usually the
9527	     AND of a new shift with a mask.  We compute the result below.  */
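	  /* For example, in SImode, (lshiftrt (ashift X 3) 2) becomes
	     (and (ashift X 1) (const_int 0x3ffffffe)).  */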
9528	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9529	      && INTVAL (XEXP (varop, 1)) >= 0
9530	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9531	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9532	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9533	    {
9534	      enum rtx_code first_code = GET_CODE (varop);
9535	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9536	      unsigned HOST_WIDE_INT mask;
9537	      rtx mask_rtx;
9538
9539	      /* We have one common special case.  We can't do any merging if
9540		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9541		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9542		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9543		 we can convert it to
		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9545		 This simplifies certain SIGN_EXTEND operations.  */
9546	      if (code == ASHIFT && first_code == ASHIFTRT
9547		  && count == (unsigned int)
9548			      (GET_MODE_BITSIZE (result_mode)
9549			       - GET_MODE_BITSIZE (GET_MODE (varop))))
9550		{
9551		  /* C3 has the low-order C1 bits zero.  */
9552
9553		  mask = (GET_MODE_MASK (mode)
9554			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9555
9556		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9557						  XEXP (varop, 0), mask);
9558		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9559						varop, count);
9560		  count = first_count;
9561		  code = ASHIFTRT;
9562		  continue;
9563		}
9564
9565	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9566		 than C1 high-order bits equal to the sign bit, we can convert
9567		 this to either an ASHIFT or an ASHIFTRT depending on the
9568		 two counts.
9569
9570		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9571
9572	      if (code == ASHIFTRT && first_code == ASHIFT
9573		  && GET_MODE (varop) == shift_mode
9574		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9575		      > first_count))
9576		{
9577		  varop = XEXP (varop, 0);
9578
9579		  signed_count = count - first_count;
9580		  if (signed_count < 0)
9581		    count = -signed_count, code = ASHIFT;
9582		  else
9583		    count = signed_count;
9584
9585		  continue;
9586		}
9587
9588	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9589		 we can only do this if FIRST_CODE is also ASHIFTRT.
9590
9591		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9592		 ASHIFTRT.
9593
9594		 If the mode of this shift is not the mode of the outer shift,
9595		 we can't do this if either shift is a right shift or ROTATE.
9596
9597		 Finally, we can't do any of these if the mode is too wide
9598		 unless the codes are the same.
9599
9600		 Handle the case where the shift codes are the same
9601		 first.  */
9602
9603	      if (code == first_code)
9604		{
9605		  if (GET_MODE (varop) != result_mode
9606		      && (code == ASHIFTRT || code == LSHIFTRT
9607			  || code == ROTATE))
9608		    break;
9609
9610		  count += first_count;
9611		  varop = XEXP (varop, 0);
9612		  continue;
9613		}
9614
9615	      if (code == ASHIFTRT
9616		  || (code == ROTATE && first_code == ASHIFTRT)
9617		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9618		  || (GET_MODE (varop) != result_mode
9619		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9620			  || first_code == ROTATE
9621			  || code == ROTATE)))
9622		break;
9623
9624	      /* To compute the mask to apply after the shift, shift the
9625		 nonzero bits of the inner shift the same way the
9626		 outer shift will.  */
9627
9628	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9629
9630	      mask_rtx
9631		= simplify_binary_operation (code, result_mode, mask_rtx,
9632					     GEN_INT (count));
9633
9634	      /* Give up if we can't compute an outer operation to use.  */
9635	      if (mask_rtx == 0
9636		  || GET_CODE (mask_rtx) != CONST_INT
9637		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9638					INTVAL (mask_rtx),
9639					result_mode, &complement_p))
9640		break;
9641
9642	      /* If the shifts are in the same direction, we add the
9643		 counts.  Otherwise, we subtract them.  */
9644	      signed_count = count;
9645	      if ((code == ASHIFTRT || code == LSHIFTRT)
9646		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9647		signed_count += first_count;
9648	      else
9649		signed_count -= first_count;
9650
	      /* If SIGNED_COUNT is positive, the new shift is usually CODE,
		 except for the two exceptions below, in which case it is
		 FIRST_CODE.  If SIGNED_COUNT is negative, FIRST_CODE
		 should always be used.  */
9655	      if (signed_count > 0
9656		  && ((first_code == ROTATE && code == ASHIFT)
9657		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9658		code = first_code, count = signed_count;
9659	      else if (signed_count < 0)
9660		code = first_code, count = -signed_count;
9661	      else
9662		count = signed_count;
9663
9664	      varop = XEXP (varop, 0);
9665	      continue;
9666	    }
9667
9668	  /* If we have (A << B << C) for any shift, we can convert this to
9669	     (A << C << B).  This wins if A is a constant.  Only try this if
9670	     B is not a constant.  */
9671
9672	  else if (GET_CODE (varop) == code
9673		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9674		   && 0 != (new
9675			    = simplify_binary_operation (code, mode,
9676							 XEXP (varop, 0),
9677							 GEN_INT (count))))
9678	    {
9679	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9680	      count = 0;
9681	      continue;
9682	    }
9683	  break;
9684
9685	case NOT:
9686	  /* Make this fit the case below.  */
9687	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9688			       GEN_INT (GET_MODE_MASK (mode)));
9689	  continue;
9690
9691	case IOR:
9692	case AND:
9693	case XOR:
9694	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9695	     with C the size of VAROP - 1 and the shift is logical if
9696	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9697	     we have an (le X 0) operation.   If we have an arithmetic shift
9698	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9699	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
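	  /* For example, when X is zero, (plus X -1) is -1, so the IOR
	     is -1 and the logical shift by the mode size minus 1 leaves
	     1, matching (le X 0) when STORE_FLAG_VALUE is 1.  */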
9700
9701	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9702	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9703	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9704	      && (code == LSHIFTRT || code == ASHIFTRT)
9705	      && count == (unsigned int)
9706			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9707	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9708	    {
9709	      count = 0;
9710	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9711				  const0_rtx);
9712
9713	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9714		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9715
9716	      continue;
9717	    }
9718
9719	  /* If we have (shift (logical)), move the logical to the outside
9720	     to allow it to possibly combine with another logical and the
9721	     shift to combine with another shift.  This also canonicalizes to
9722	     what a ZERO_EXTRACT looks like.  Also, some machines have
9723	     (and (shift)) insns.  */
9724
9725	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9726	      && (new = simplify_binary_operation (code, result_mode,
9727						   XEXP (varop, 1),
9728						   GEN_INT (count))) != 0
9729	      && GET_CODE (new) == CONST_INT
9730	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9731				  INTVAL (new), result_mode, &complement_p))
9732	    {
9733	      varop = XEXP (varop, 0);
9734	      continue;
9735	    }
9736
9737	  /* If we can't do that, try to simplify the shift in each arm of the
9738	     logical expression, make a new logical expression, and apply
9739	     the inverse distributive law.  */
9740	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
9741	    {
9742	      rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9743					      XEXP (varop, 0), count);
9744	      rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9745					      XEXP (varop, 1), count);
9746
9747	      varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9748	      varop = apply_distributive_law (varop);
9749
9750	      count = 0;
9751	    }
9752	  break;
9753
9754	case EQ:
	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9756	     says that the sign bit can be tested, FOO has mode MODE, C is
9757	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9758	     that may be nonzero.  */
9759	  if (code == LSHIFTRT
9760	      && XEXP (varop, 1) == const0_rtx
9761	      && GET_MODE (XEXP (varop, 0)) == result_mode
9762	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9763	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9764	      && ((STORE_FLAG_VALUE
9765		   & ((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (result_mode) - 1))))
9767	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9768	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9769				  (HOST_WIDE_INT) 1, result_mode,
9770				  &complement_p))
9771	    {
9772	      varop = XEXP (varop, 0);
9773	      count = 0;
9774	      continue;
9775	    }
9776	  break;
9777
9778	case NEG:
9779	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9780	     than the number of bits in the mode is equivalent to A.  */
9781	  if (code == LSHIFTRT
9782	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9783	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9784	    {
9785	      varop = XEXP (varop, 0);
9786	      count = 0;
9787	      continue;
9788	    }
9789
9790	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9791	     NEG outside to allow shifts to combine.  */
9792	  if (code == ASHIFT
9793	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9794				  (HOST_WIDE_INT) 0, result_mode,
9795				  &complement_p))
9796	    {
9797	      varop = XEXP (varop, 0);
9798	      continue;
9799	    }
9800	  break;
9801
9802	case PLUS:
9803	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9804	     is one less than the number of bits in the mode is
9805	     equivalent to (xor A 1).  */
9806	  if (code == LSHIFTRT
9807	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9808	      && XEXP (varop, 1) == constm1_rtx
9809	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9810	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9811				  (HOST_WIDE_INT) 1, result_mode,
9812				  &complement_p))
9813	    {
9814	      count = 0;
9815	      varop = XEXP (varop, 0);
9816	      continue;
9817	    }
9818
9819	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9820	     that might be nonzero in BAR are those being shifted out and those
9821	     bits are known zero in FOO, we can replace the PLUS with FOO.
9822	     Similarly in the other operand order.  This code occurs when
9823	     we are computing the size of a variable-size array.  */
9824
9825	  if ((code == ASHIFTRT || code == LSHIFTRT)
9826	      && count < HOST_BITS_PER_WIDE_INT
9827	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9828	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9829		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9830	    {
9831	      varop = XEXP (varop, 0);
9832	      continue;
9833	    }
9834	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9835		   && count < HOST_BITS_PER_WIDE_INT
9836		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9837		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9838			    >> count)
9839		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9840			    & nonzero_bits (XEXP (varop, 1),
9841						 result_mode)))
9842	    {
9843	      varop = XEXP (varop, 1);
9844	      continue;
9845	    }
9846
9847	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9848	  if (code == ASHIFT
9849	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9850	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9851						   XEXP (varop, 1),
9852						   GEN_INT (count))) != 0
9853	      && GET_CODE (new) == CONST_INT
9854	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9855				  INTVAL (new), result_mode, &complement_p))
9856	    {
9857	      varop = XEXP (varop, 0);
9858	      continue;
9859	    }
9860	  break;
9861
9862	case MINUS:
	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9864	     with C the size of VAROP - 1 and the shift is logical if
9865	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9866	     we have a (gt X 0) operation.  If the shift is arithmetic with
9867	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9868	     we have a (neg (gt X 0)) operation.  */
9869
9870	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9871	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9872	      && count == (unsigned int)
9873			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9874	      && (code == LSHIFTRT || code == ASHIFTRT)
9875	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9876	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9877		 == count
9878	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9879	    {
9880	      count = 0;
9881	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9882				  const0_rtx);
9883
9884	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9885		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9886
9887	      continue;
9888	    }
9889	  break;
9890
9891	case TRUNCATE:
9892	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9893	     if the truncate does not affect the value.  */
9894	  if (code == LSHIFTRT
9895	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9896	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9897	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9898		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9899		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9900	    {
9901	      rtx varop_inner = XEXP (varop, 0);
9902
9903	      varop_inner
9904		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9905				    XEXP (varop_inner, 0),
9906				    GEN_INT
9907				    (count + INTVAL (XEXP (varop_inner, 1))));
9908	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9909	      count = 0;
9910	      continue;
9911	    }
9912	  break;
9913
9914	default:
9915	  break;
9916	}
9917
9918      break;
9919    }
9920
9921  /* We need to determine what mode to do the shift in.  If the shift is
9922     a right shift or ROTATE, we must always do it in the mode it was
9923     originally done in.  Otherwise, we can do it in MODE, the widest mode
9924     encountered.  The code we care about is that of the shift that will
9925     actually be done, not the shift that was originally requested.  */
9926  shift_mode
9927    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9928       ? result_mode : mode);
9929
9930  /* We have now finished analyzing the shift.  The result should be
9931     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9932     OUTER_OP is non-NIL, it is an operation that needs to be applied
9933     to the result of the shift.  OUTER_CONST is the relevant constant,
9934     but we must turn off all bits turned off in the shift.
9935
9936     If we were passed a value for X, see if we can use any pieces of
9937     it.  If not, make new rtx.  */
9938
9939  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9940      && GET_CODE (XEXP (x, 1)) == CONST_INT
9941      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9942    const_rtx = XEXP (x, 1);
9943  else
9944    const_rtx = GEN_INT (count);
9945
9946  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9947      && GET_MODE (XEXP (x, 0)) == shift_mode
9948      && SUBREG_REG (XEXP (x, 0)) == varop)
9949    varop = XEXP (x, 0);
9950  else if (GET_MODE (varop) != shift_mode)
9951    varop = gen_lowpart_for_combine (shift_mode, varop);
9952
9953  /* If we can't make the SUBREG, try to return what we were given.  */
9954  if (GET_CODE (varop) == CLOBBER)
9955    return x ? x : varop;
9956
9957  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9958  if (new != 0)
9959    x = new;
9960  else
9961    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9962
9963  /* If we have an outer operation and we just made a shift, it is
9964     possible that we could have simplified the shift were it not
9965     for the outer operation.  So try to do the simplification
9966     recursively.  */
9967
9968  if (outer_op != NIL && GET_CODE (x) == code
9969      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9970    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9971			      INTVAL (XEXP (x, 1)));
9972
9973  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9974     turn off all the bits that the shift would have turned off.  */
9975  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9976    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9977				GET_MODE_MASK (result_mode) >> orig_count);
9978
9979  /* Do the remainder of the processing in RESULT_MODE.  */
9980  x = gen_lowpart_for_combine (result_mode, x);
9981
9982  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9983     operation.  */
9984  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9986
9987  if (outer_op != NIL)
9988    {
9989      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9990	outer_const = trunc_int_for_mode (outer_const, result_mode);
9991
9992      if (outer_op == AND)
9993	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9994      else if (outer_op == SET)
9995	/* This means that we have determined that the result is
9996	   equivalent to a constant.  This should be rare.  */
9997	x = GEN_INT (outer_const);
9998      else if (GET_RTX_CLASS (outer_op) == '1')
9999	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
10000      else
10001	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
10002    }
10003
10004  return x;
10005}
10006
10007/* Like recog, but we receive the address of a pointer to a new pattern.
10008   We try to match the rtx that the pointer points to.
10009   If that fails, we may try to modify or replace the pattern,
10010   storing the replacement into the same pointer object.
10011
10012   Modifications include deletion or addition of CLOBBERs.
10013
10014   PNOTES is a pointer to a location where any REG_UNUSED notes added for
10015   the CLOBBERs are placed.
10016
10017   The value is the final insn code from the pattern ultimately matched,
10018   or -1.  */
10019
10020static int
10021recog_for_combine (pnewpat, insn, pnotes)
10022     rtx *pnewpat;
10023     rtx insn;
10024     rtx *pnotes;
10025{
10026  rtx pat = *pnewpat;
10027  int insn_code_number;
10028  int num_clobbers_to_add = 0;
10029  int i;
10030  rtx notes = 0;
10031  rtx dummy_insn;
10032
10033  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10034     we use to indicate that something didn't match.  If we find such a
10035     thing, force rejection.  */
10036  if (GET_CODE (pat) == PARALLEL)
10037    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10038      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10039	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10040	return -1;
10041
10042  /* *pnewpat does not have to be actual PATTERN (insn), so make a dummy
10043     instruction for pattern recognition.  */
10044  dummy_insn = shallow_copy_rtx (insn);
10045  PATTERN (dummy_insn) = pat;
10046  REG_NOTES (dummy_insn) = 0;
10047
10048  insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
10049
  /* If the pattern isn't recognized, there is the possibility that we
     previously had an insn that clobbered some register as a side
     effect, but the combined insn doesn't need to do that.  So try once
     more without the clobbers unless this represents an ASM insn.  */
10054
10055  if (insn_code_number < 0 && ! check_asm_operands (pat)
10056      && GET_CODE (pat) == PARALLEL)
10057    {
10058      int pos;
10059
10060      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10061	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10062	  {
10063	    if (i != pos)
10064	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10065	    pos++;
10066	  }
10067
10068      SUBST_INT (XVECLEN (pat, 0), pos);
10069
10070      if (pos == 1)
10071	pat = XVECEXP (pat, 0, 0);
10072
10073      PATTERN (dummy_insn) = pat;
10074      insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
10075    }
10076
  /* Recognize all noop sets; these will be killed by a followup pass.  */
10078  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
10079    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10080
  /* If we had any clobbers to add, make a new pattern that contains
10082     them.  Then check to make sure that all of them are dead.  */
10083  if (num_clobbers_to_add)
10084    {
10085      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10086				     rtvec_alloc (GET_CODE (pat) == PARALLEL
10087						  ? (XVECLEN (pat, 0)
10088						     + num_clobbers_to_add)
10089						  : num_clobbers_to_add + 1));
10090
10091      if (GET_CODE (pat) == PARALLEL)
10092	for (i = 0; i < XVECLEN (pat, 0); i++)
10093	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10094      else
10095	XVECEXP (newpat, 0, 0) = pat;
10096
10097      add_clobbers (newpat, insn_code_number);
10098
10099      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10100	   i < XVECLEN (newpat, 0); i++)
10101	{
10102	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
10103	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10104	    return -1;
10105	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
10106				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
10107	}
10108      pat = newpat;
10109    }
10110
10111  *pnewpat = pat;
10112  *pnotes = notes;
10113
10114  return insn_code_number;
10115}
10116
10117/* Like gen_lowpart but for use by combine.  In combine it is not possible
10118   to create any new pseudoregs.  However, it is safe to create
10119   invalid memory addresses, because combine will try to recognize
10120   them and all they will do is make the combine attempt fail.
10121
10122   If for some reason this cannot do its job, an rtx
10123   (clobber (const_int 0)) is returned.
10124   An insn containing that will not be recognized.  */
10125
10126#undef gen_lowpart
10127
10128static rtx
10129gen_lowpart_for_combine (mode, x)
10130     enum machine_mode mode;
10131     rtx x;
10132{
10133  rtx result;
10134
10135  if (GET_MODE (x) == mode)
10136    return x;
10137
10138  /* We can only support MODE being wider than a word if X is a
10139     constant integer or has a mode the same size.  */
10140
10141  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
10142      && ! ((GET_MODE (x) == VOIDmode
10143	     && (GET_CODE (x) == CONST_INT
10144		 || GET_CODE (x) == CONST_DOUBLE))
10145	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
10146    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10147
10148  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10149     won't know what to do.  So we will strip off the SUBREG here and
10150     process normally.  */
10151  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
10152    {
10153      x = SUBREG_REG (x);
10154      if (GET_MODE (x) == mode)
10155	return x;
10156    }
10157
10158  result = gen_lowpart_common (mode, x);
10159#ifdef CANNOT_CHANGE_MODE_CLASS
10160  if (result != 0 && GET_CODE (result) == SUBREG)
10161    record_subregs_of_mode (result);
10162#endif
10163
10164  if (result)
10165    return result;
10166
10167  if (GET_CODE (x) == MEM)
10168    {
10169      int offset = 0;
10170
10171      /* Refuse to work on a volatile memory ref or one with a mode-dependent
10172	 address.  */
10173      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10174	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10175
10176      /* If we want to refer to something bigger than the original memref,
10177	 generate a perverse subreg instead.  That will force a reload
10178	 of the original memref X.  */
10179      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
10180	return gen_rtx_SUBREG (mode, x, 0);
10181
10182      if (WORDS_BIG_ENDIAN)
10183	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
10184		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
10185
10186      if (BYTES_BIG_ENDIAN)
10187	{
10188	  /* Adjust the address so that the address-after-the-data is
10189	     unchanged.  */
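	  /* For example, on a typical big-endian 32-bit target, the
	     QImode lowpart of an SImode memory reference ends up at
	     byte offset 3.  */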
10190	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
10191		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
10192	}
10193
10194      return adjust_address_nv (x, mode, offset);
10195    }
10196
10197  /* If X is a comparison operator, rewrite it in a new mode.  This
10198     probably won't match, but may allow further simplifications.  */
10199  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
10200    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
10201
10202  /* If we couldn't simplify X any other way, just enclose it in a
10203     SUBREG.  Normally, this SUBREG won't match, but some patterns may
10204     include an explicit SUBREG or we may simplify it further in combine.  */
10205  else
10206    {
10207      int offset = 0;
10208      rtx res;
10209      enum machine_mode sub_mode = GET_MODE (x);
10210
10211      offset = subreg_lowpart_offset (mode, sub_mode);
10212      if (sub_mode == VOIDmode)
10213	{
10214	  sub_mode = int_mode_for_mode (mode);
10215	  x = gen_lowpart_common (sub_mode, x);
10216	}
10217      res = simplify_gen_subreg (mode, x, sub_mode, offset);
10218      if (res)
10219	return res;
10220      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10221    }
10222}
10223
10224/* These routines make binary and unary operations by first seeing if they
10225   fold; if not, a new expression is allocated.  */
10226
10227static rtx
10228gen_binary (code, mode, op0, op1)
10229     enum rtx_code code;
10230     enum machine_mode mode;
10231     rtx op0, op1;
10232{
10233  rtx result;
10234  rtx tem;
10235
10236  if (GET_CODE (op0) == CLOBBER)
10237    return op0;
10238  else if (GET_CODE (op1) == CLOBBER)
10239    return op1;
10240
10241  if (GET_RTX_CLASS (code) == 'c'
10242      && swap_commutative_operands_p (op0, op1))
10243    tem = op0, op0 = op1, op1 = tem;
10244
10245  if (GET_RTX_CLASS (code) == '<')
10246    {
10247      enum machine_mode op_mode = GET_MODE (op0);
10248
10249      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
10250	 just (REL_OP X Y).  */
10251      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
10252	{
10253	  op1 = XEXP (op0, 1);
10254	  op0 = XEXP (op0, 0);
10255	  op_mode = GET_MODE (op0);
10256	}
10257
10258      if (op_mode == VOIDmode)
10259	op_mode = GET_MODE (op1);
10260      result = simplify_relational_operation (code, op_mode, op0, op1);
10261    }
10262  else
10263    result = simplify_binary_operation (code, mode, op0, op1);
10264
10265  if (result)
10266    return result;
10267
10268  /* Put complex operands first and constants second.  */
10269  if (GET_RTX_CLASS (code) == 'c'
10270      && swap_commutative_operands_p (op0, op1))
10271    return gen_rtx_fmt_ee (code, mode, op1, op0);
10272
10273  /* If we are turning off bits already known off in OP0, we need not do
10274     an AND.  */
10275  else if (code == AND && GET_CODE (op1) == CONST_INT
10276	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10277	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
10278    return op0;
10279
10280  return gen_rtx_fmt_ee (code, mode, op0, op1);
10281}
10282
10283/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10284   comparison code that will be tested.
10285
10286   The result is a possibly different comparison code to use.  *POP0 and
10287   *POP1 may be updated.
10288
10289   It is possible that we might detect that a comparison is either always
10290   true or always false.  However, we do not perform general constant
10291   folding in combine, so this knowledge isn't useful.  Such tautologies
10292   should have been detected earlier.  Hence we ignore all such cases.  */
10293
10294static enum rtx_code
10295simplify_comparison (code, pop0, pop1)
10296     enum rtx_code code;
10297     rtx *pop0;
10298     rtx *pop1;
10299{
10300  rtx op0 = *pop0;
10301  rtx op1 = *pop1;
10302  rtx tem, tem1;
10303  int i;
10304  enum machine_mode mode, tmode;
10305
10306  /* Try a few ways of applying the same transformation to both operands.  */
10307  while (1)
10308    {
10309#ifndef WORD_REGISTER_OPERATIONS
10310      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10311	 so check specially.  */
10312      if (code != GTU && code != GEU && code != LTU && code != LEU
10313	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10314	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
10315	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
10316	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10317	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10318	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10319	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10320	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10321	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
10322	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10323	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10324	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10325	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10326	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10327	  && (INTVAL (XEXP (op0, 1))
10328	      == (GET_MODE_BITSIZE (GET_MODE (op0))
10329		  - (GET_MODE_BITSIZE
10330		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10331	{
10332	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10333	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10334	}
10335#endif
10336
10337      /* If both operands are the same constant shift, see if we can ignore the
10338	 shift.  We can if the shift is a rotate or if the bits shifted out of
10339	 this shift are known to be zero for both inputs and if the type of
10340	 comparison is compatible with the shift.  */
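      /* For example, (eq (lshiftrt X 2) (lshiftrt Y 2)) can become
	 (eq X Y) when the low two bits of both X and Y are known to
	 be zero.  */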
10341      if (GET_CODE (op0) == GET_CODE (op1)
10342	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10343	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10344	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10345		  && (code != GT && code != LT && code != GE && code != LE))
10346	      || (GET_CODE (op0) == ASHIFTRT
10347		  && (code != GTU && code != LTU
10348		      && code != GEU && code != LEU)))
10349	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10350	  && INTVAL (XEXP (op0, 1)) >= 0
10351	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10352	  && XEXP (op0, 1) == XEXP (op1, 1))
10353	{
10354	  enum machine_mode mode = GET_MODE (op0);
10355	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10356	  int shift_count = INTVAL (XEXP (op0, 1));
10357
10358	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10359	    mask &= (mask >> shift_count) << shift_count;
10360	  else if (GET_CODE (op0) == ASHIFT)
10361	    mask = (mask & (mask << shift_count)) >> shift_count;
10362
10363	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10364	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10365	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10366	  else
10367	    break;
10368	}
10369
10370      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10371	 SUBREGs are of the same mode, and, in both cases, the AND would
10372	 be redundant if the comparison was done in the narrower mode,
10373	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10374	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10375	 if we only care about QImode, we don't need the AND).  This case
10376	 occurs if the output mode of an scc insn is not SImode and
10377	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10378
10379	 Similarly, check for a case where the AND's are ZERO_EXTEND
10380	 operations from some narrower mode even though a SUBREG is not
10381	 present.  */
10382
10383      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10384	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10385	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10386	{
10387	  rtx inner_op0 = XEXP (op0, 0);
10388	  rtx inner_op1 = XEXP (op1, 0);
10389	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10390	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10391	  int changed = 0;
10392
10393	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10394	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10395		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10396	      && (GET_MODE (SUBREG_REG (inner_op0))
10397		  == GET_MODE (SUBREG_REG (inner_op1)))
10398	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10399		  <= HOST_BITS_PER_WIDE_INT)
10400	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10401					     GET_MODE (SUBREG_REG (inner_op0)))))
10402	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10403					     GET_MODE (SUBREG_REG (inner_op1))))))
10404	    {
10405	      op0 = SUBREG_REG (inner_op0);
10406	      op1 = SUBREG_REG (inner_op1);
10407
10408	      /* The resulting comparison is always unsigned since we masked
10409		 off the original sign bit.  */
10410	      code = unsigned_condition (code);
10411
10412	      changed = 1;
10413	    }
10414
10415	  else if (c0 == c1)
10416	    for (tmode = GET_CLASS_NARROWEST_MODE
10417		 (GET_MODE_CLASS (GET_MODE (op0)));
10418		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10419	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10420		{
10421		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10422		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10423		  code = unsigned_condition (code);
10424		  changed = 1;
10425		  break;
10426		}
10427
10428	  if (! changed)
10429	    break;
10430	}
10431
10432      /* If both operands are NOT, we can strip off the outer operation
10433	 and adjust the comparison code for swapped operands; similarly for
10434	 NEG, except that this must be an equality comparison.  */
10435      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10436	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10437		   && (code == EQ || code == NE)))
10438	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10439
10440      else
10441	break;
10442    }
10443
10444  /* If the first operand is a constant, swap the operands and adjust the
10445     comparison code appropriately, but don't do this if the second operand
10446     is already a constant integer.  */
10447  if (swap_commutative_operands_p (op0, op1))
10448    {
10449      tem = op0, op0 = op1, op1 = tem;
10450      code = swap_condition (code);
10451    }
10452
  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we are only concerned with comparisons with zero,
     but some things may really be comparisons with zero that do not start
     out looking that way.  */
10457
10458  while (GET_CODE (op1) == CONST_INT)
10459    {
10460      enum machine_mode mode = GET_MODE (op0);
10461      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10462      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10463      int equality_comparison_p;
10464      int sign_bit_comparison_p;
10465      int unsigned_comparison_p;
10466      HOST_WIDE_INT const_op;
10467
10468      /* We only want to handle integral modes.  This catches VOIDmode,
10469	 CCmode, and the floating-point modes.  An exception is that we
10470	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10471	 operation.  */
10472
10473      if (GET_MODE_CLASS (mode) != MODE_INT
10474	  && ! (mode == VOIDmode
10475		&& (GET_CODE (op0) == COMPARE
10476		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10477	break;
10478
10479      /* Get the constant we are comparing against and turn off all bits
10480	 not on in our mode.  */
10481      const_op = INTVAL (op1);
10482      if (mode != VOIDmode)
10483	const_op = trunc_int_for_mode (const_op, mode);
10484      op1 = GEN_INT (const_op);
10485
10486      /* If we are comparing against a constant power of two and the value
10487	 being compared can only have that single bit nonzero (e.g., it was
10488	 `and'ed with that bit), we can replace this with a comparison
10489	 with zero.  */
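      /* For example, if X is known to have only bit 3 possibly nonzero,
	 say because it was just masked with (and ... 8), then
	 (eq X (const_int 8)) is equivalent to (ne X (const_int 0)).  */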
10490      if (const_op
10491	  && (code == EQ || code == NE || code == GE || code == GEU
10492	      || code == LT || code == LTU)
10493	  && mode_width <= HOST_BITS_PER_WIDE_INT
10494	  && exact_log2 (const_op) >= 0
10495	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10496	{
10497	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10498	  op1 = const0_rtx, const_op = 0;
10499	}
10500
10501      /* Similarly, if we are comparing a value known to be either -1 or
10502	 0 with -1, change it to the opposite comparison against zero.  */
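      /* For instance, X = (ashiftrt Y 31) in SImode is always -1 or 0,
	 since num_sign_bit_copies of it is the full mode width; so
	 (eq X (const_int -1)) becomes (ne X (const_int 0)).  */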
10503
10504      if (const_op == -1
10505	  && (code == EQ || code == NE || code == GT || code == LE
10506	      || code == GEU || code == LTU)
10507	  && num_sign_bit_copies (op0, mode) == mode_width)
10508	{
10509	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10510	  op1 = const0_rtx, const_op = 0;
10511	}
10512
10513      /* Do some canonicalizations based on the comparison code.  We prefer
10514	 comparisons against zero and then prefer equality comparisons.
10515	 If we can reduce the size of a constant, we will do that too.  */
10516
10517      switch (code)
10518	{
10519	case LT:
	  /* < C is equivalent to <= (C - 1).  */
10521	  if (const_op > 0)
10522	    {
10523	      const_op -= 1;
10524	      op1 = GEN_INT (const_op);
10525	      code = LE;
10526	      /* ... fall through to LE case below.  */
10527	    }
10528	  else
10529	    break;
10530
10531	case LE:
	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
10533	  if (const_op < 0)
10534	    {
10535	      const_op += 1;
10536	      op1 = GEN_INT (const_op);
10537	      code = LT;
10538	    }
10539
10540	  /* If we are doing a <= 0 comparison on a value known to have
10541	     a zero sign bit, we can replace this with == 0.  */
10542	  else if (const_op == 0
10543		   && mode_width <= HOST_BITS_PER_WIDE_INT
10544		   && (nonzero_bits (op0, mode)
10545		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10546	    code = EQ;
10547	  break;
10548
10549	case GE:
10550	  /* >= C is equivalent to > (C - 1).  */
10551	  if (const_op > 0)
10552	    {
10553	      const_op -= 1;
10554	      op1 = GEN_INT (const_op);
10555	      code = GT;
10556	      /* ... fall through to GT below.  */
10557	    }
10558	  else
10559	    break;
10560
10561	case GT:
10562	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10563	  if (const_op < 0)
10564	    {
10565	      const_op += 1;
10566	      op1 = GEN_INT (const_op);
10567	      code = GE;
10568	    }
10569
10570	  /* If we are doing a > 0 comparison on a value known to have
10571	     a zero sign bit, we can replace this with != 0.  */
10572	  else if (const_op == 0
10573		   && mode_width <= HOST_BITS_PER_WIDE_INT
10574		   && (nonzero_bits (op0, mode)
10575		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10576	    code = NE;
10577	  break;
10578
10579	case LTU:
10580	  /* < C is equivalent to <= (C - 1).  */
10581	  if (const_op > 0)
10582	    {
10583	      const_op -= 1;
10584	      op1 = GEN_INT (const_op);
10585	      code = LEU;
10586	      /* ... fall through ...  */
10587	    }
10588
10589	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10590	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10591		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10592	    {
10593	      const_op = 0, op1 = const0_rtx;
10594	      code = GE;
10595	      break;
10596	    }
10597	  else
10598	    break;
10599
10600	case LEU:
	  /* unsigned <= 0 is equivalent to == 0.  */
10602	  if (const_op == 0)
10603	    code = EQ;
10604
10605	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10606	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10607		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10608	    {
10609	      const_op = 0, op1 = const0_rtx;
10610	      code = GE;
10611	    }
10612	  break;
10613
10614	case GEU:
	  /* >= C is equivalent to > (C - 1).  */
10616	  if (const_op > 1)
10617	    {
10618	      const_op -= 1;
10619	      op1 = GEN_INT (const_op);
10620	      code = GTU;
10621	      /* ... fall through ...  */
10622	    }
10623
10624	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10625	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10626		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10627	    {
10628	      const_op = 0, op1 = const0_rtx;
10629	      code = LT;
10630	      break;
10631	    }
10632	  else
10633	    break;
10634
10635	case GTU:
	  /* unsigned > 0 is equivalent to != 0.  */
10637	  if (const_op == 0)
10638	    code = NE;
10639
10640	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10641	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10642		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10643	    {
10644	      const_op = 0, op1 = const0_rtx;
10645	      code = LT;
10646	    }
10647	  break;
10648
10649	default:
10650	  break;
10651	}
10652
10653      /* Compute some predicates to simplify code below.  */
10654
10655      equality_comparison_p = (code == EQ || code == NE);
10656      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10657      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10658			       || code == GEU);
10659
      /* If this is a sign bit comparison and we can do arithmetic in
	 MODE, say that we will only need the sign bit of OP0.  */
10662      if (sign_bit_comparison_p
10663	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10664	op0 = force_to_mode (op0, mode,
10665			     ((HOST_WIDE_INT) 1
10666			      << (GET_MODE_BITSIZE (mode) - 1)),
10667			     NULL_RTX, 0);
10668
10669      /* Now try cases based on the opcode of OP0.  If none of the cases
10670	 does a "continue", we exit this loop immediately after the
10671	 switch.  */
10672
10673      switch (GET_CODE (op0))
10674	{
10675	case ZERO_EXTRACT:
10676	  /* If we are extracting a single bit from a variable position in
10677	     a constant that has only a single bit set and are comparing it
10678	     with zero, we can convert this into an equality comparison
10679	     between the position and the location of the single bit.  */
10680	  /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10681	     have already reduced the shift count modulo the word size.  */
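	  /* For example, (eq (zero_extract (const_int 4) (const_int 1) POS)
	     (const_int 0)) tests bit POS of the constant 4, which is set
	     only for POS == 2, so the whole test becomes
	     (ne POS (const_int 2)), modulo the bit-numbering adjustment
	     made just below for BITS_BIG_ENDIAN machines.  */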
10682	  if (!SHIFT_COUNT_TRUNCATED
10683	      && GET_CODE (XEXP (op0, 0)) == CONST_INT
10684	      && XEXP (op0, 1) == const1_rtx
10685	      && equality_comparison_p && const_op == 0
10686	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10687	    {
10688	      if (BITS_BIG_ENDIAN)
10689		{
10690		  enum machine_mode new_mode
10691		    = mode_for_extraction (EP_extzv, 1);
10692		  if (new_mode == MAX_MACHINE_MODE)
10693		    i = BITS_PER_WORD - 1 - i;
10694		  else
10695		    {
10696		      mode = new_mode;
10697		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10698		    }
10699		}
10700
10701	      op0 = XEXP (op0, 2);
10702	      op1 = GEN_INT (i);
10703	      const_op = i;
10704
10705	      /* Result is nonzero iff shift count is equal to I.  */
10706	      code = reverse_condition (code);
10707	      continue;
10708	    }
10709
10710	  /* ... fall through ...  */
10711
10712	case SIGN_EXTRACT:
10713	  tem = expand_compound_operation (op0);
10714	  if (tem != op0)
10715	    {
10716	      op0 = tem;
10717	      continue;
10718	    }
10719	  break;
10720
10721	case NOT:
10722	  /* If testing for equality, we can take the NOT of the constant.  */
10723	  if (equality_comparison_p
10724	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10725	    {
10726	      op0 = XEXP (op0, 0);
10727	      op1 = tem;
10728	      continue;
10729	    }
10730
10731	  /* If just looking at the sign bit, reverse the sense of the
10732	     comparison.  */
10733	  if (sign_bit_comparison_p)
10734	    {
10735	      op0 = XEXP (op0, 0);
10736	      code = (code == GE ? LT : GE);
10737	      continue;
10738	    }
10739	  break;
10740
10741	case NEG:
10742	  /* If testing for equality, we can take the NEG of the constant.  */
10743	  if (equality_comparison_p
10744	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10745	    {
10746	      op0 = XEXP (op0, 0);
10747	      op1 = tem;
10748	      continue;
10749	    }
10750
10751	  /* The remaining cases only apply to comparisons with zero.  */
10752	  if (const_op != 0)
10753	    break;
10754
10755	  /* When X is ABS or is known positive,
10756	     (neg X) is < 0 if and only if X != 0.  */
10757
10758	  if (sign_bit_comparison_p
10759	      && (GET_CODE (XEXP (op0, 0)) == ABS
10760		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10761		      && (nonzero_bits (XEXP (op0, 0), mode)
10762			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10763	    {
10764	      op0 = XEXP (op0, 0);
10765	      code = (code == LT ? NE : EQ);
10766	      continue;
10767	    }
10768
10769	  /* If we have NEG of something whose two high-order bits are the
10770	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
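	  /* Two identical high-order bits guarantee the value is not the
	     most negative one, the only operand for which NEG overflows
	     (in SImode, the negation of -2147483648 wraps back to
	     -2147483648), so the negation can be undone safely.  */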
10771	  if (num_sign_bit_copies (op0, mode) >= 2)
10772	    {
10773	      op0 = XEXP (op0, 0);
10774	      code = swap_condition (code);
10775	      continue;
10776	    }
10777	  break;
10778
10779	case ROTATE:
10780	  /* If we are testing equality and our count is a constant, we
10781	     can perform the inverse operation on our RHS.  */
10782	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10783	      && (tem = simplify_binary_operation (ROTATERT, mode,
10784						   op1, XEXP (op0, 1))) != 0)
10785	    {
10786	      op0 = XEXP (op0, 0);
10787	      op1 = tem;
10788	      continue;
10789	    }
10790
10791	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10792	     a particular bit.  Convert it to an AND of a constant of that
10793	     bit.  This will be converted into a ZERO_EXTRACT.  */
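	  /* For example, in SImode (lt (rotate X (const_int 1))
	     (const_int 0)) tests bit 30 of X, since a rotate left by 1
	     moves bit 30 into the sign bit; it becomes
	     (ne (and X (const_int 0x40000000)) (const_int 0)).  */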
10794	  if (const_op == 0 && sign_bit_comparison_p
10795	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10796	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10797	    {
10798	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10799					    ((HOST_WIDE_INT) 1
10800					     << (mode_width - 1
10801						 - INTVAL (XEXP (op0, 1)))));
10802	      code = (code == LT ? NE : EQ);
10803	      continue;
10804	    }
10805
10806	  /* Fall through.  */
10807
10808	case ABS:
10809	  /* ABS is ignorable inside an equality comparison with zero.  */
10810	  if (const_op == 0 && equality_comparison_p)
10811	    {
10812	      op0 = XEXP (op0, 0);
10813	      continue;
10814	    }
10815	  break;
10816
10817	case SIGN_EXTEND:
10818	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10819	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10820	     are either testing inequality or have an unsigned comparison
10821	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
10822	  if (! unsigned_comparison_p
10823	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10824		  <= HOST_BITS_PER_WIDE_INT)
10825	      && ((unsigned HOST_WIDE_INT) const_op
10826		  < (((unsigned HOST_WIDE_INT) 1
10827		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10828	    {
10829	      op0 = XEXP (op0, 0);
10830	      continue;
10831	    }
10832	  break;
10833
10834	case SUBREG:
10835	  /* Check for the case where we are comparing A - C1 with C2, that is
10836
10837	       (subreg:MODE (plus (A) (-C1))) op (C2)
10838
10839	     with C1 a constant, and try to lift the SUBREG, i.e. to do the
10840	     comparison in the wider mode.  One of the following two conditions
10841	     must be true in order for this to be valid:
10842
10843	       1. The mode extension results in the same bit pattern being added
10844		  on both sides and the comparison is equality or unsigned.  As
10845		  C2 has been truncated to fit in MODE, the pattern can only be
10846		  all 0s or all 1s.
10847
10848	       2. The mode extension results in the sign bit being copied on
10849		  each side.
10850
	     The difficulty here is that we have predicates for A but not for
	     (A - C1) so we need to check that C1 is within proper bounds so
	     as to perturb A as little as possible.  */
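
	  /* For instance, if the bits of A above QImode are known to be
	     zero, (eq (subreg:QI (plus:SI A (const_int -1)) 0)
	     (const_int 3)) meets case 1 above, so the comparison can be
	     done in SImode on (plus:SI A (const_int -1)) directly.  */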
10854
10855	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10856	      && subreg_lowpart_p (op0)
10857	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10858	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10859	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
10860	    {
10861	      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10862	      rtx a = XEXP (SUBREG_REG (op0), 0);
10863	      HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10864
10865	      if ((c1 > 0
10866	           && (unsigned HOST_WIDE_INT) c1
10867		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10868		   && (equality_comparison_p || unsigned_comparison_p)
10869		   /* (A - C1) zero-extends if it is positive and sign-extends
10870		      if it is negative, C2 both zero- and sign-extends.  */
10871		   && ((0 == (nonzero_bits (a, inner_mode)
10872			      & ~GET_MODE_MASK (mode))
10873			&& const_op >= 0)
10874		       /* (A - C1) sign-extends if it is positive and 1-extends
10875			  if it is negative, C2 both sign- and 1-extends.  */
10876		       || (num_sign_bit_copies (a, inner_mode)
10877			   > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10878					     - mode_width)
10879			   && const_op < 0)))
10880		  || ((unsigned HOST_WIDE_INT) c1
10881		       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10882		      /* (A - C1) always sign-extends, like C2.  */
10883		      && num_sign_bit_copies (a, inner_mode)
10884			 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10885					   - mode_width - 1)))
10886		{
10887		  op0 = SUBREG_REG (op0);
10888		  continue;
10889	        }
10890	    }
10891
10892	  /* If the inner mode is narrower and we are extracting the low part,
10893	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10894	  if (subreg_lowpart_p (op0)
10895	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10896	    /* Fall through */ ;
10897	  else
10898	    break;
10899
10900	  /* ... fall through ...  */
10901
10902	case ZERO_EXTEND:
10903	  if ((unsigned_comparison_p || equality_comparison_p)
10904	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10905		  <= HOST_BITS_PER_WIDE_INT)
10906	      && ((unsigned HOST_WIDE_INT) const_op
10907		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10908	    {
10909	      op0 = XEXP (op0, 0);
10910	      continue;
10911	    }
10912	  break;
10913
10914	case PLUS:
10915	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10916	     this for equality comparisons due to pathological cases involving
10917	     overflows.  */
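	  /* For example, (eq (plus X (const_int 5)) (const_int 7))
	     becomes (eq X (const_int 2)).  */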
10918	  if (equality_comparison_p
10919	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10920							op1, XEXP (op0, 1))))
10921	    {
10922	      op0 = XEXP (op0, 0);
10923	      op1 = tem;
10924	      continue;
10925	    }
10926
10927	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10928	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10929	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10930	    {
10931	      op0 = XEXP (XEXP (op0, 0), 0);
10932	      code = (code == LT ? EQ : NE);
10933	      continue;
10934	    }
10935	  break;
10936
10937	case MINUS:
10938	  /* We used to optimize signed comparisons against zero, but that
10939	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10940	     arrive here as equality comparisons, or (GEU, LTU) are
10941	     optimized away.  No need to special-case them.  */
10942
10943	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10944	     (eq B (minus A C)), whichever simplifies.  We can only do
10945	     this for equality comparisons due to pathological cases involving
10946	     overflows.  */
10947	  if (equality_comparison_p
10948	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10949							XEXP (op0, 1), op1)))
10950	    {
10951	      op0 = XEXP (op0, 0);
10952	      op1 = tem;
10953	      continue;
10954	    }
10955
10956	  if (equality_comparison_p
10957	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10958							XEXP (op0, 0), op1)))
10959	    {
10960	      op0 = XEXP (op0, 1);
10961	      op1 = tem;
10962	      continue;
10963	    }
10964
10965	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10966	     of bits in X minus 1, is one iff X > 0.  */
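	  /* When X >= 0 the ASHIFTRT is 0 and the MINUS is -X, whose
	     sign bit is set exactly when X > 0; when X < 0 the ASHIFTRT
	     is -1 and the MINUS is -1 - X, which is nonnegative.  */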
10967	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10968	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10969	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10970		 == mode_width - 1
10971	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10972	    {
10973	      op0 = XEXP (op0, 1);
10974	      code = (code == GE ? LE : GT);
10975	      continue;
10976	    }
10977	  break;
10978
10979	case XOR:
10980	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10981	     if C is zero or B is a constant.  */
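	  /* For example, (eq (xor A (const_int 5)) (const_int 3))
	     becomes (eq A (const_int 6)), 6 being 5 ^ 3.  */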
10982	  if (equality_comparison_p
10983	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10984							XEXP (op0, 1), op1)))
10985	    {
10986	      op0 = XEXP (op0, 0);
10987	      op1 = tem;
10988	      continue;
10989	    }
10990	  break;
10991
10992	case EQ:  case NE:
10993	case UNEQ:  case LTGT:
10994	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10995	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10996        case UNORDERED: case ORDERED:
10997	  /* We can't do anything if OP0 is a condition code value, rather
10998	     than an actual data value.  */
10999	  if (const_op != 0
11000#ifdef HAVE_cc0
11001	      || XEXP (op0, 0) == cc0_rtx
11002#endif
11003	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
11004	    break;
11005
11006	  /* Get the two operands being compared.  */
11007	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
11008	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
11009	  else
11010	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
11011
11012	  /* Check for the cases where we simply want the result of the
11013	     earlier test or the opposite of that result.  */
11014	  if (code == NE || code == EQ
11015	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
11016		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11017		  && (STORE_FLAG_VALUE
11018		      & (((HOST_WIDE_INT) 1
11019			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
11020		  && (code == LT || code == GE)))
11021	    {
11022	      enum rtx_code new_code;
11023	      if (code == LT || code == NE)
11024		new_code = GET_CODE (op0);
11025	      else
11026		new_code = combine_reversed_comparison_code (op0);
11027
11028	      if (new_code != UNKNOWN)
11029		{
11030		  code = new_code;
11031		  op0 = tem;
11032		  op1 = tem1;
11033		  continue;
11034		}
11035	    }
11036	  break;
11037
11038	case IOR:
11039	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
11040	     iff X <= 0.  */
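	  /* If X > 0, both X and X - 1 are nonnegative and the IOR has a
	     clear sign bit; if X == 0, then X - 1 is -1; and if X < 0,
	     X itself supplies the sign bit.  */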
11041	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11042	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11043	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11044	    {
11045	      op0 = XEXP (op0, 1);
11046	      code = (code == GE ? GT : LE);
11047	      continue;
11048	    }
11049	  break;
11050
11051	case AND:
	  /* Convert (and (ashift 1 X) Y) to (and (lshiftrt Y X) 1).  This
	     will be converted to a ZERO_EXTRACT later.  */
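	  /* Both forms compute bit X of Y, so an equality test against
	     zero is unchanged by the rewrite.  */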
11054	  if (const_op == 0 && equality_comparison_p
11055	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11056	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11057	    {
11058	      op0 = simplify_and_const_int
11059		(op0, mode, gen_rtx_LSHIFTRT (mode,
11060					      XEXP (op0, 1),
11061					      XEXP (XEXP (op0, 0), 1)),
11062		 (HOST_WIDE_INT) 1);
11063	      continue;
11064	    }
11065
11066	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11067	     zero and X is a comparison and C1 and C2 describe only bits set
11068	     in STORE_FLAG_VALUE, we can compare with X.  */
11069	  if (const_op == 0 && equality_comparison_p
11070	      && mode_width <= HOST_BITS_PER_WIDE_INT
11071	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11072	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11073	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11074	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
11075	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
11076	    {
11077	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11078		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
11079	      if ((~STORE_FLAG_VALUE & mask) == 0
11080		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
11081		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
11082			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
11083		{
11084		  op0 = XEXP (XEXP (op0, 0), 0);
11085		  continue;
11086		}
11087	    }
11088
11089	  /* If we are doing an equality comparison of an AND of a bit equal
11090	     to the sign bit, replace this with a LT or GE comparison of
11091	     the underlying value.  */
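	  /* For example, in SImode (eq (and X (const_int 0x80000000))
	     (const_int 0)) becomes (ge X (const_int 0)), and the
	     corresponding NE test becomes (lt X (const_int 0)).  */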
11092	  if (equality_comparison_p
11093	      && const_op == 0
11094	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11095	      && mode_width <= HOST_BITS_PER_WIDE_INT
11096	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11097		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
11098	    {
11099	      op0 = XEXP (op0, 0);
11100	      code = (code == EQ ? GE : LT);
11101	      continue;
11102	    }
11103
11104	  /* If this AND operation is really a ZERO_EXTEND from a narrower
11105	     mode, the constant fits within that mode, and this is either an
11106	     equality or unsigned comparison, try to do this comparison in
11107	     the narrower mode.  */
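	  /* For example, (ltu (and:SI X (const_int 255)) (const_int 10))
	     can be done as a QImode comparison of the low part of X
	     against 10, the AND being just a zero extension from QImode.  */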
11108	  if ((equality_comparison_p || unsigned_comparison_p)
11109	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11110	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11111				   & GET_MODE_MASK (mode))
11112				  + 1)) >= 0
11113	      && const_op >> i == 0
11114	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
11115	    {
11116	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
11117	      continue;
11118	    }
11119
11120	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
11121	     in both M1 and M2 and the SUBREG is either paradoxical or
11122	     represents the low part, permute the SUBREG and the AND and
11123	     try again.  */
11124	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
11125	      /* Require an integral mode, to avoid creating something like
11126		 (AND:SF ...).  */
11127	      && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11128	      && (0
11129#ifdef WORD_REGISTER_OPERATIONS
11130		  || ((mode_width
11131		       > (GET_MODE_BITSIZE
11132			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11133		      && mode_width <= BITS_PER_WORD)
11134#endif
11135		  || ((mode_width
11136		       <= (GET_MODE_BITSIZE
11137			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11138		      && subreg_lowpart_p (XEXP (op0, 0))))
11139#ifndef WORD_REGISTER_OPERATIONS
11140	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
11141		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
11142		 As originally written the upper bits have a defined value
11143		 due to the AND operation.  However, if we commute the AND
11144		 inside the SUBREG then they no longer have defined values
11145		 and the meaning of the code has been changed.  */
11146	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
11147		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
11148#endif
11149	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11150	      && mode_width <= HOST_BITS_PER_WIDE_INT
11151	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11152		  <= HOST_BITS_PER_WIDE_INT)
11153	      && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
11154	      && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11155		       & INTVAL (XEXP (op0, 1)))
11156	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
11157	      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11158		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11159
11160	    {
11161	      op0
11162		= gen_lowpart_for_combine
11163		  (mode,
11164		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
11165			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
11166	      continue;
11167	    }
11168
	  /* Convert (ne (and (lshiftrt (not X) C) 1) 0) to
	     (eq (and (lshiftrt X C) 1) 0), since bit C of (not X) is
	     the complement of bit C of X.  */
11171	  if (const_op == 0 && equality_comparison_p
11172	      && XEXP (op0, 1) == const1_rtx
11173	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11174	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
11175	    {
11176	      op0 = simplify_and_const_int
11177		(op0, mode,
11178		 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
11179				   XEXP (XEXP (op0, 0), 1)),
11180		 (HOST_WIDE_INT) 1);
11181	      code = (code == NE ? EQ : NE);
11182	      continue;
11183	    }
11184	  break;
11185
11186	case ASHIFT:
11187	  /* If we have (compare (ashift FOO N) (const_int C)) and
11188	     the high order N bits of FOO (N+1 if an inequality comparison)
11189	     are known to be zero, we can do this by comparing FOO with C
11190	     shifted right N bits so long as the low-order N bits of C are
11191	     zero.  */
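	  /* For example, (eq (ashift X (const_int 2)) (const_int 20))
	     becomes (eq X (const_int 5)) when the two high-order bits
	     of X are known to be zero.  */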
11192	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11193	      && INTVAL (XEXP (op0, 1)) >= 0
11194	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11195		  < HOST_BITS_PER_WIDE_INT)
11196	      && ((const_op
11197		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11198	      && mode_width <= HOST_BITS_PER_WIDE_INT
11199	      && (nonzero_bits (XEXP (op0, 0), mode)
11200		  & ~(mask >> (INTVAL (XEXP (op0, 1))
11201			       + ! equality_comparison_p))) == 0)
11202	    {
11203	      /* We must perform a logical shift, not an arithmetic one,
11204		 as we want the top N bits of C to be zero.  */
11205	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11206
11207	      temp >>= INTVAL (XEXP (op0, 1));
11208	      op1 = gen_int_mode (temp, mode);
11209	      op0 = XEXP (op0, 0);
11210	      continue;
11211	    }
11212
11213	  /* If we are doing a sign bit comparison, it means we are testing
11214	     a particular bit.  Convert it to the appropriate AND.  */
11215	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
11216	      && mode_width <= HOST_BITS_PER_WIDE_INT)
11217	    {
11218	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11219					    ((HOST_WIDE_INT) 1
11220					     << (mode_width - 1
11221						 - INTVAL (XEXP (op0, 1)))));
11222	      code = (code == LT ? NE : EQ);
11223	      continue;
11224	    }
11225
	  /* If this is an equality comparison with zero and we are shifting
	     the low bit to the sign bit, we can convert this to an AND of the
	     low-order bit.  */
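	  /* For example, in SImode (eq (ashift X (const_int 31))
	     (const_int 0)) depends only on the low bit of X, so it
	     becomes (eq (and X (const_int 1)) (const_int 0)).  */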
11229	  if (const_op == 0 && equality_comparison_p
11230	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11231	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11232		 == mode_width - 1)
11233	    {
11234	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11235					    (HOST_WIDE_INT) 1);
11236	      continue;
11237	    }
11238	  break;
11239
11240	case ASHIFTRT:
11241	  /* If this is an equality comparison with zero, we can do this
11242	     as a logical shift, which might be much simpler.  */
11243	  if (equality_comparison_p && const_op == 0
11244	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
11245	    {
11246	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11247					  XEXP (op0, 0),
11248					  INTVAL (XEXP (op0, 1)));
11249	      continue;
11250	    }
11251
11252	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11253	     do the comparison in a narrower mode.  */
11254	  if (! unsigned_comparison_p
11255	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11256	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11257	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11258	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11259					 MODE_INT, 1)) != BLKmode
11260	      && (((unsigned HOST_WIDE_INT) const_op
11261		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11262		  <= GET_MODE_MASK (tmode)))
11263	    {
11264	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
11265	      continue;
11266	    }
11267
11268	  /* Likewise if OP0 is a PLUS of a sign extension with a
11269	     constant, which is usually represented with the PLUS
11270	     between the shifts.  */
11271	  if (! unsigned_comparison_p
11272	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11273	      && GET_CODE (XEXP (op0, 0)) == PLUS
11274	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11275	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11276	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11277	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11278					 MODE_INT, 1)) != BLKmode
11279	      && (((unsigned HOST_WIDE_INT) const_op
11280		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11281		  <= GET_MODE_MASK (tmode)))
11282	    {
11283	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11284	      rtx add_const = XEXP (XEXP (op0, 0), 1);
11285	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
11286					  XEXP (op0, 1));
11287
11288	      op0 = gen_binary (PLUS, tmode,
11289				gen_lowpart_for_combine (tmode, inner),
11290				new_const);
11291	      continue;
11292	    }
11293
11294	  /* ... fall through ...  */
11295	case LSHIFTRT:
11296	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11297	     the low order N bits of FOO are known to be zero, we can do this
11298	     by comparing FOO with C shifted left N bits so long as no
11299	     overflow occurs.  */
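	  /* For example, (eq (lshiftrt X (const_int 2)) (const_int 5))
	     becomes (eq X (const_int 20)) when the low two bits of X
	     are known to be zero.  */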
11300	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11301	      && INTVAL (XEXP (op0, 1)) >= 0
11302	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11303	      && mode_width <= HOST_BITS_PER_WIDE_INT
11304	      && (nonzero_bits (XEXP (op0, 0), mode)
11305		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11306	      && (((unsigned HOST_WIDE_INT) const_op
11307		   + (GET_CODE (op0) != LSHIFTRT
11308		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11309			 + 1)
11310		      : 0))
11311		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11312	    {
11313	      /* If the shift was logical, then we must make the condition
11314		 unsigned.  */
11315	      if (GET_CODE (op0) == LSHIFTRT)
11316		code = unsigned_condition (code);
11317
11318	      const_op <<= INTVAL (XEXP (op0, 1));
11319	      op1 = GEN_INT (const_op);
11320	      op0 = XEXP (op0, 0);
11321	      continue;
11322	    }
11323
11324	  /* If we are using this shift to extract just the sign bit, we
11325	     can replace this with an LT or GE comparison.  */
11326	  if (const_op == 0
11327	      && (equality_comparison_p || sign_bit_comparison_p)
11328	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11329	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11330		 == mode_width - 1)
11331	    {
11332	      op0 = XEXP (op0, 0);
11333	      code = (code == NE || code == GT ? LT : GE);
11334	      continue;
11335	    }
11336	  break;
11337
11338	default:
11339	  break;
11340	}
11341
11342      break;
11343    }
11344
11345  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that is not doing anything or is
11347     paradoxical.  The latter transformation must only be performed when
11348     it is known that the "extra" bits will be the same in op0 and op1 or
11349     that they don't matter.  There are three cases to consider:
11350
11351     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11352     care bits and we can assume they have any convenient value.  So
11353     making the transformation is safe.
11354
11355     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11356     In this case the upper bits of op0 are undefined.  We should not make
11357     the simplification in that case as we do not know the contents of
11358     those bits.
11359
11360     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11361     NIL.  In that case we know those bits are zeros or ones.  We must
11362     also be sure that they are the same as the upper bits of op1.
11363
11364     We can never remove a SUBREG for a non-equality comparison because
11365     the sign bit is in a different place in the underlying object.  */
11366
11367  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11368  op1 = make_compound_operation (op1, SET);
11369
11370  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      /* Case 3 above, which would sometimes allow (subreg (mem x)), isn't
	 implemented.  */
11373      && GET_CODE (SUBREG_REG (op0)) == REG
11374      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11375      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11376      && (code == NE || code == EQ))
11377    {
11378      if (GET_MODE_SIZE (GET_MODE (op0))
11379	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11380	{
11381	  op0 = SUBREG_REG (op0);
11382	  op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11383	}
11384      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11385		<= HOST_BITS_PER_WIDE_INT)
11386	       && (nonzero_bits (SUBREG_REG (op0),
11387				 GET_MODE (SUBREG_REG (op0)))
11388		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11389	{
11390	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
11391
11392	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11393	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11394	    op0 = SUBREG_REG (op0), op1 = tem;
11395	}
11396    }
11397
11398  /* We now do the opposite procedure: Some machines don't have compare
11399     insns in all modes.  If OP0's mode is an integer mode smaller than a
11400     word and we can't do a compare in that mode, see if there is a larger
11401     mode for which we can do the compare.  There are a number of cases in
11402     which we can use the wider mode.  */
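
  /* For example, on a target that lacks an HImode compare instruction, an
     HImode equality test can be done as an SImode compare, provided the
     operands are known to be zero- or sign-extended into the wider mode.  */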
11403
11404  mode = GET_MODE (op0);
11405  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11406      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11407      && ! have_insn_for (COMPARE, mode))
11408    for (tmode = GET_MODE_WIDER_MODE (mode);
11409	 (tmode != VOIDmode
11410	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11411	 tmode = GET_MODE_WIDER_MODE (tmode))
11412      if (have_insn_for (COMPARE, tmode))
11413	{
11414	  int zero_extended;
11415
11416	  /* If the only nonzero bits in OP0 and OP1 are those in the
11417	     narrower mode and this is an equality or unsigned comparison,
11418	     we can use the wider mode.  Similarly for sign-extended
11419	     values, in which case it is true for all comparisons.  */
11420	  zero_extended = ((code == EQ || code == NE
11421			    || code == GEU || code == GTU
11422			    || code == LEU || code == LTU)
11423			   && (nonzero_bits (op0, tmode)
11424			       & ~GET_MODE_MASK (mode)) == 0
11425			   && ((GET_CODE (op1) == CONST_INT
11426				|| (nonzero_bits (op1, tmode)
11427				    & ~GET_MODE_MASK (mode)) == 0)));
11428
11429	  if (zero_extended
11430	      || ((num_sign_bit_copies (op0, tmode)
11431		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11432				     - GET_MODE_BITSIZE (mode)))
11433		  && (num_sign_bit_copies (op1, tmode)
11434		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11435					- GET_MODE_BITSIZE (mode)))))
11436	    {
11437	      /* If OP0 is an AND and we don't have an AND in MODE either,
11438		 make a new AND in the proper mode.  */
11439	      if (GET_CODE (op0) == AND
11440		  && !have_insn_for (AND, mode))
11441		op0 = gen_binary (AND, tmode,
11442				  gen_lowpart_for_combine (tmode,
11443							   XEXP (op0, 0)),
11444				  gen_lowpart_for_combine (tmode,
11445							   XEXP (op0, 1)));
11446
11447	      op0 = gen_lowpart_for_combine (tmode, op0);
11448	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11449		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11450	      op1 = gen_lowpart_for_combine (tmode, op1);
11451	      break;
11452	    }
11453
11454	  /* If this is a test for negative, we can make an explicit
11455	     test of the sign bit.  */
11456
11457	  if (op1 == const0_rtx && (code == LT || code == GE)
11458	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11459	    {
11460	      op0 = gen_binary (AND, tmode,
11461				gen_lowpart_for_combine (tmode, op0),
11462				GEN_INT ((HOST_WIDE_INT) 1
11463					 << (GET_MODE_BITSIZE (mode) - 1)));
11464	      code = (code == LT) ? NE : EQ;
11465	      break;
11466	    }
11467	}
11468
11469#ifdef CANONICALIZE_COMPARISON
11470  /* If this machine only supports a subset of valid comparisons, see if we
11471     can convert an unsupported one into a supported one.  */
11472  CANONICALIZE_COMPARISON (code, op0, op1);
11473#endif
11474
11475  *pop0 = op0;
11476  *pop1 = op1;
11477
11478  return code;
11479}
11480
/* Like jump.c's reversed_comparison_code, but use combine infrastructure for
11482   searching backward.  */
11483static enum rtx_code
11484combine_reversed_comparison_code (exp)
11485     rtx exp;
11486{
11487  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11488  rtx x;
11489
11490  if (code1 != UNKNOWN
11491      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11492    return code1;
  /* Otherwise try to find where the condition codes were last set and
     use that.  */
11495  x = get_last_value (XEXP (exp, 0));
11496  if (!x || GET_CODE (x) != COMPARE)
11497    return UNKNOWN;
11498  return reversed_comparison_code_parts (GET_CODE (exp),
11499					 XEXP (x, 0), XEXP (x, 1), NULL);
11500}
11501/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11502   Return NULL_RTX in case we fail to do the reversal.  */
11503static rtx
11504reversed_comparison (exp, mode, op0, op1)
11505     rtx exp, op0, op1;
11506     enum machine_mode mode;
11507{
11508  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11509  if (reversed_code == UNKNOWN)
11510    return NULL_RTX;
11511  else
11512    return gen_binary (reversed_code, mode, op0, op1);
11513}
11514
/* Utility function for the following routine.  Called when X is part of a
   value being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */
11518
11519static void
11520update_table_tick (x)
11521     rtx x;
11522{
11523  enum rtx_code code = GET_CODE (x);
11524  const char *fmt = GET_RTX_FORMAT (code);
11525  int i;
11526
11527  if (code == REG)
11528    {
11529      unsigned int regno = REGNO (x);
11530      unsigned int endregno
11531	= regno + (regno < FIRST_PSEUDO_REGISTER
11532		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11533      unsigned int r;
11534
11535      for (r = regno; r < endregno; r++)
11536	reg_last_set_table_tick[r] = label_tick;
11537
11538      return;
11539    }
11540
11541  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11542    /* Note that we can't have an "E" in values stored; see
11543       get_last_value_validate.  */
11544    if (fmt[i] == 'e')
11545      {
	/* Check for identical subexpressions.  If x contains
	   identical subexpressions we only have to traverse one of
	   them.  */
11549	if (i == 0
11550	    && (GET_RTX_CLASS (code) == '2'
11551		|| GET_RTX_CLASS (code) == 'c'))
11552	  {
11553	    /* Note that at this point x1 has already been
11554	       processed.  */
11555	    rtx x0 = XEXP (x, 0);
11556	    rtx x1 = XEXP (x, 1);
11557
11558	    /* If x0 and x1 are identical then there is no need to
11559	       process x0.  */
11560	    if (x0 == x1)
11561	      break;
11562
11563	    /* If x0 is identical to a subexpression of x1 then while
11564	       processing x1, x0 has already been processed.  Thus we
11565	       are done with x.  */
11566	    if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11567		 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11568		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11569	      break;
11570
11571	    /* If x1 is identical to a subexpression of x0 then we
11572	       still have to process the rest of x0.  */
11573	    if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11574		 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11575		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11576	      {
11577		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11578		break;
11579	      }
11580	  }
11581
11582	update_table_tick (XEXP (x, i));
11583      }
11584}
11585
11586/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11587   are saying that the register is clobbered and we no longer know its
11588   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11589   with VALUE also zero and is used to invalidate the register.  */
11590
11591static void
11592record_value_for_reg (reg, insn, value)
11593     rtx reg;
11594     rtx insn;
11595     rtx value;
11596{
11597  unsigned int regno = REGNO (reg);
11598  unsigned int endregno
11599    = regno + (regno < FIRST_PSEUDO_REGISTER
11600	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11601  unsigned int i;
11602
11603  /* If VALUE contains REG and we have a previous value for REG, substitute
11604     the previous value.  */
11605  if (value && insn && reg_overlap_mentioned_p (reg, value))
11606    {
11607      rtx tem;
11608
11609      /* Set things up so get_last_value is allowed to see anything set up to
11610	 our insn.  */
11611      subst_low_cuid = INSN_CUID (insn);
11612      tem = get_last_value (reg);
11613
11614      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11615	 it isn't going to be useful and will take a lot of time to process,
11616	 so just use the CLOBBER.  */
11617
11618      if (tem)
11619	{
11620	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11621	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11622	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11623	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11624	    tem = XEXP (tem, 0);
11625
11626	  value = replace_rtx (copy_rtx (value), reg, tem);
11627	}
11628    }
11629
11630  /* For each register modified, show we don't know its value, that
11631     we don't know about its bitwise content, that its value has been
11632     updated, and that we don't know the location of the death of the
11633     register.  */
11634  for (i = regno; i < endregno; i++)
11635    {
11636      if (insn)
11637	reg_last_set[i] = insn;
11638
11639      reg_last_set_value[i] = 0;
11640      reg_last_set_mode[i] = 0;
11641      reg_last_set_nonzero_bits[i] = 0;
11642      reg_last_set_sign_bit_copies[i] = 0;
11643      reg_last_death[i] = 0;
11644    }
11645
11646  /* Mark registers that are being referenced in this value.  */
11647  if (value)
11648    update_table_tick (value);
11649
11650  /* Now update the status of each register being set.
11651     If someone is using this register in this block, set this register
11652     to invalid since we will get confused between the two lives in this
11653     basic block.  This makes using this register always invalid.  In cse, we
11654     scan the table to invalidate all entries using this register, but this
11655     is too much work for us.  */
11656
11657  for (i = regno; i < endregno; i++)
11658    {
11659      reg_last_set_label[i] = label_tick;
11660      if (value && reg_last_set_table_tick[i] == label_tick)
11661	reg_last_set_invalid[i] = 1;
11662      else
11663	reg_last_set_invalid[i] = 0;
11664    }
11665
11666  /* The value being assigned might refer to X (like in "x++;").  In that
11667     case, we must replace it with (clobber (const_int 0)) to prevent
11668     infinite loops.  */
11669  if (value && ! get_last_value_validate (&value, insn,
11670					  reg_last_set_label[regno], 0))
11671    {
11672      value = copy_rtx (value);
11673      if (! get_last_value_validate (&value, insn,
11674				     reg_last_set_label[regno], 1))
11675	value = 0;
11676    }
11677
11678  /* For the main register being modified, update the value, the mode, the
11679     nonzero bits, and the number of sign bit copies.  */
11680
11681  reg_last_set_value[regno] = value;
11682
11683  if (value)
11684    {
11685      enum machine_mode mode = GET_MODE (reg);
11686      subst_low_cuid = INSN_CUID (insn);
11687      reg_last_set_mode[regno] = mode;
11688      if (GET_MODE_CLASS (mode) == MODE_INT
11689	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11690	mode = nonzero_bits_mode;
11691      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11692      reg_last_set_sign_bit_copies[regno]
11693	= num_sign_bit_copies (value, GET_MODE (reg));
11694    }
11695}
11696
11697/* Called via note_stores from record_dead_and_set_regs to handle one
11698   SET or CLOBBER in an insn.  DATA is the instruction in which the
11699   set is occurring.  */
11700
11701static void
11702record_dead_and_set_regs_1 (dest, setter, data)
11703     rtx dest, setter;
11704     void *data;
11705{
11706  rtx record_dead_insn = (rtx) data;
11707
11708  if (GET_CODE (dest) == SUBREG)
11709    dest = SUBREG_REG (dest);
11710
11711  if (GET_CODE (dest) == REG)
11712    {
11713      /* If we are setting the whole register, we know its value.  Otherwise
11714	 show that we don't know the value.  We can handle SUBREG in
11715	 some cases.  */
11716      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11717	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11718      else if (GET_CODE (setter) == SET
11719	       && GET_CODE (SET_DEST (setter)) == SUBREG
11720	       && SUBREG_REG (SET_DEST (setter)) == dest
11721	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11722	       && subreg_lowpart_p (SET_DEST (setter)))
11723	record_value_for_reg (dest, record_dead_insn,
11724			      gen_lowpart_for_combine (GET_MODE (dest),
11725						       SET_SRC (setter)));
11726      else
11727	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11728    }
11729  else if (GET_CODE (dest) == MEM
11730	   /* Ignore pushes, they clobber nothing.  */
11731	   && ! push_operand (dest, GET_MODE (dest)))
11732    mem_last_set = INSN_CUID (record_dead_insn);
11733}
11734
11735/* Update the records of when each REG was most recently set or killed
11736   for the things done by INSN.  This is the last thing done in processing
11737   INSN in the combiner loop.
11738
11739   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11740   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11741   and also the similar information mem_last_set (which insn most recently
11742   modified memory) and last_call_cuid (which insn was the most recent
11743   subroutine call).  */
11744
11745static void
11746record_dead_and_set_regs (insn)
11747     rtx insn;
11748{
11749  rtx link;
11750  unsigned int i;
11751
11752  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11753    {
11754      if (REG_NOTE_KIND (link) == REG_DEAD
11755	  && GET_CODE (XEXP (link, 0)) == REG)
11756	{
11757	  unsigned int regno = REGNO (XEXP (link, 0));
11758	  unsigned int endregno
11759	    = regno + (regno < FIRST_PSEUDO_REGISTER
11760		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11761		       : 1);
11762
11763	  for (i = regno; i < endregno; i++)
11764	    reg_last_death[i] = insn;
11765	}
11766      else if (REG_NOTE_KIND (link) == REG_INC)
11767	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11768    }
11769
11770  if (GET_CODE (insn) == CALL_INSN)
11771    {
11772      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11773	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11774	  {
11775	    reg_last_set_value[i] = 0;
11776	    reg_last_set_mode[i] = 0;
11777	    reg_last_set_nonzero_bits[i] = 0;
11778	    reg_last_set_sign_bit_copies[i] = 0;
11779	    reg_last_death[i] = 0;
11780	  }
11781
11782      last_call_cuid = mem_last_set = INSN_CUID (insn);
11783
11784      /* Don't bother recording what this insn does.  It might set the
11785	 return value register, but we can't combine into a call
11786	 pattern anyway, so there's no point trying (and it may cause
11787	 a crash, if e.g. we wind up asking for last_set_value of a
11788	 SUBREG of the return value register).  */
11789      return;
11790    }
11791
11792  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11793}
11794
11795/* If a SUBREG has the promoted bit set, it is in fact a property of the
11796   register present in the SUBREG, so for each such SUBREG go back and
11797   adjust nonzero and sign bit information of the registers that are
11798   known to have some zero/sign bits set.
11799
11800   This is needed because when combine blows the SUBREGs away, the
11801   information on zero/sign bits is lost and further combines can be
11802   missed because of that.  */
11803
11804static void
11805record_promoted_value (insn, subreg)
11806     rtx insn;
11807     rtx subreg;
11808{
11809  rtx links, set;
11810  unsigned int regno = REGNO (SUBREG_REG (subreg));
11811  enum machine_mode mode = GET_MODE (subreg);
11812
11813  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11814    return;
11815
11816  for (links = LOG_LINKS (insn); links;)
11817    {
11818      insn = XEXP (links, 0);
11819      set = single_set (insn);
11820
11821      if (! set || GET_CODE (SET_DEST (set)) != REG
11822	  || REGNO (SET_DEST (set)) != regno
11823	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11824	{
11825	  links = XEXP (links, 1);
11826	  continue;
11827	}
11828
11829      if (reg_last_set[regno] == insn)
11830	{
11831	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11832	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11833	}
11834
11835      if (GET_CODE (SET_SRC (set)) == REG)
11836	{
11837	  regno = REGNO (SET_SRC (set));
11838	  links = LOG_LINKS (insn);
11839	}
11840      else
11841	break;
11842    }
11843}
11844
11845/* Scan X for promoted SUBREGs.  For each one found,
11846   note what it implies to the registers used in it.  */
11847
11848static void
11849check_promoted_subreg (insn, x)
11850     rtx insn;
11851     rtx x;
11852{
11853  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11854      && GET_CODE (SUBREG_REG (x)) == REG)
11855    record_promoted_value (insn, x);
11856  else
11857    {
11858      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11859      int i, j;
11860
11861      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11862	switch (format[i])
11863	  {
11864	  case 'e':
11865	    check_promoted_subreg (insn, XEXP (x, i));
11866	    break;
11867	  case 'V':
11868	  case 'E':
11869	    if (XVEC (x, i) != 0)
11870	      for (j = 0; j < XVECLEN (x, i); j++)
11871		check_promoted_subreg (insn, XVECEXP (x, i, j));
11872	    break;
11873	  }
11874    }
11875}
11876
11877/* Utility routine for the following function.  Verify that all the registers
11878   mentioned in *LOC are valid when *LOC was part of a value set when
11879   label_tick == TICK.  Return 0 if some are not.
11880
11881   If REPLACE is nonzero, replace the invalid reference with
11882   (clobber (const_int 0)) and return 1.  This replacement is useful because
11883   we often can get useful information about the form of a value (e.g., if
11884   it was produced by a shift that always produces -1 or 0) even though
11885   we don't know exactly what registers it was produced from.  */
11886
11887static int
11888get_last_value_validate (loc, insn, tick, replace)
11889     rtx *loc;
11890     rtx insn;
11891     int tick;
11892     int replace;
11893{
11894  rtx x = *loc;
11895  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11896  int len = GET_RTX_LENGTH (GET_CODE (x));
11897  int i;
11898
11899  if (GET_CODE (x) == REG)
11900    {
11901      unsigned int regno = REGNO (x);
11902      unsigned int endregno
11903	= regno + (regno < FIRST_PSEUDO_REGISTER
11904		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11905      unsigned int j;
11906
11907      for (j = regno; j < endregno; j++)
11908	if (reg_last_set_invalid[j]
11909	    /* If this is a pseudo-register that was only set once and not
11910	       live at the beginning of the function, it is always valid.  */
11911	    || (! (regno >= FIRST_PSEUDO_REGISTER
11912		   && REG_N_SETS (regno) == 1
11913		   && (! REGNO_REG_SET_P
11914		       (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11915		&& reg_last_set_label[j] > tick))
11916	  {
11917	    if (replace)
11918	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11919	    return replace;
11920	  }
11921
11922      return 1;
11923    }
11924  /* If this is a memory reference, make sure that there were
11925     no stores after it that might have clobbered the value.  We don't
11926     have alias info, so we assume any store invalidates it.  */
11927  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11928	   && INSN_CUID (insn) <= mem_last_set)
11929    {
11930      if (replace)
11931	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11932      return replace;
11933    }
11934
11935  for (i = 0; i < len; i++)
11936    {
11937      if (fmt[i] == 'e')
11938	{
11939	  /* Check for identical subexpressions.  If x contains
11940	     identical subexpressions we only have to traverse one of
11941	     them.  */
11942	  if (i == 1
11943	      && (GET_RTX_CLASS (GET_CODE (x)) == '2'
11944		  || GET_RTX_CLASS (GET_CODE (x)) == 'c'))
11945	    {
11946	      /* Note that at this point x0 has already been checked
11947		 and found valid.  */
11948	      rtx x0 = XEXP (x, 0);
11949	      rtx x1 = XEXP (x, 1);
11950
11951	      /* If x0 and x1 are identical then x is also valid.  */
11952	      if (x0 == x1)
11953		return 1;
11954
11955	      /* If x1 is identical to a subexpression of x0 then
11956		 while checking x0, x1 has already been checked.  Thus
11957		 it is valid and so is x.  */
11958	      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11959		   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11960		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11961		return 1;
11962
11963	      /* If x0 is identical to a subexpression of x1 then x is
11964		 valid iff the rest of x1 is valid.  */
11965	      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11966		   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11967		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11968		return
11969		  get_last_value_validate (&XEXP (x1,
11970						  x0 == XEXP (x1, 0) ? 1 : 0),
11971					   insn, tick, replace);
11972	    }
11973
11974	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11975				       replace) == 0)
11976	    return 0;
11977	}
11978      /* Don't bother with these.  They shouldn't occur anyway.  */
11979      else if (fmt[i] == 'E')
11980	return 0;
11981    }
11982
11983  /* If we haven't found a reason for it to be invalid, it is valid.  */
11984  return 1;
11985}
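
/* The identical-subexpression shortcut above matters because combine
   builds trees with shared rtx: in (plus:SI (reg:SI 65) (reg:SI 65))
   where both operands are the same object, validating operand 0 also
   validates operand 1, so the second traversal is skipped.  Without
   the shortcut, deeply nested shared trees could be traversed an
   exponential number of times.  */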
11986
11987/* Get the last value assigned to X, if known.  Some registers
11988   in the value may be replaced with (clobber (const_int 0)) if their value
11989   is no longer known reliably.  */
11990
11991static rtx
11992get_last_value (x)
11993     rtx x;
11994{
11995  unsigned int regno;
11996  rtx value;
11997
11998  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11999     then convert it to the desired mode.  If this is a paradoxical SUBREG,
12000     we cannot predict what values the "extra" bits might have.  */
12001  if (GET_CODE (x) == SUBREG
12002      && subreg_lowpart_p (x)
12003      && (GET_MODE_SIZE (GET_MODE (x))
12004	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
12005      && (value = get_last_value (SUBREG_REG (x))) != 0)
12006    return gen_lowpart_for_combine (GET_MODE (x), value);
12007
12008  if (GET_CODE (x) != REG)
12009    return 0;
12010
12011  regno = REGNO (x);
12012  value = reg_last_set_value[regno];
12013
12014  /* If we don't have a value, or if it isn't for this basic block and
12015     it's either a hard register, set more than once, or live
12016     at the beginning of the function, return 0.
12017
12018     Because if it's not live at the beginning of the function then the reg
12019     is always set before being used (is never used without being set).
12020     And, if it's set only once, and it's always set before use, then all
12021     uses must have the same last value, even if it's not from this basic
12022     block.  */
12023
12024  if (value == 0
12025      || (reg_last_set_label[regno] != label_tick
12026	  && (regno < FIRST_PSEUDO_REGISTER
12027	      || REG_N_SETS (regno) != 1
12028	      || (REGNO_REG_SET_P
12029		  (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
12030    return 0;
12031
12032  /* If the value was set in a later insn than the ones we are processing,
12033     we can't use it even if the register was only set once.  */
12034  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
12035    return 0;
12036
12037  /* If the value has all its registers valid, return it.  */
12038  if (get_last_value_validate (&value, reg_last_set[regno],
12039			       reg_last_set_label[regno], 0))
12040    return value;
12041
12042  /* Otherwise, make a copy and replace any invalid register with
12043     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
12044
12045  value = copy_rtx (value);
12046  if (get_last_value_validate (&value, reg_last_set[regno],
12047			       reg_last_set_label[regno], 1))
12048    return value;
12049
12050  return 0;
12051}
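
/* For example (with an invented pseudo): if (reg:SI 65) was last set
   to (const_int 7) in the current basic block, then get_last_value on
   the lowpart (subreg:HI (reg:SI 65) 0) returns that value converted
   to HImode by gen_lowpart_for_combine, while the paradoxical
   (subreg:DI (reg:SI 65) 0) yields 0, because nothing is known about
   the extra high bits.  */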
12052
12053/* Return nonzero if expression X refers to a REG or to memory
12054   that is set in an instruction more recent than FROM_CUID.  */
12055
12056static int
12057use_crosses_set_p (x, from_cuid)
12058     rtx x;
12059     int from_cuid;
12060{
12061  const char *fmt;
12062  int i;
12063  enum rtx_code code = GET_CODE (x);
12064
12065  if (code == REG)
12066    {
12067      unsigned int regno = REGNO (x);
12068      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
12069				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
12070
12071#ifdef PUSH_ROUNDING
12072      /* Don't allow uses of the stack pointer to be moved,
12073	 because we don't know whether the move crosses a push insn.  */
12074      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
12075	return 1;
12076#endif
12077      for (; regno < endreg; regno++)
12078	if (reg_last_set[regno]
12079	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
12080	  return 1;
12081      return 0;
12082    }
12083
12084  if (code == MEM && mem_last_set > from_cuid)
12085    return 1;
12086
12087  fmt = GET_RTX_FORMAT (code);
12088
12089  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12090    {
12091      if (fmt[i] == 'E')
12092	{
12093	  int j;
12094	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12095	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
12096	      return 1;
12097	}
12098      else if (fmt[i] == 'e'
12099	       && use_crosses_set_p (XEXP (x, i), from_cuid))
12100	return 1;
12101    }
12102  return 0;
12103}
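
/* For example, if an insn uses (reg:SI 65) and some insn after
   FROM_CUID assigns to register 65, use_crosses_set_p returns 1
   (reg_last_set[65] has a larger cuid than FROM_CUID), and combine
   refuses to substitute the use past that assignment, since the moved
   expression would read the new value rather than the old one.  */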
12104
12105/* Define three variables used for communication between the following
12106   routines.  */
12107
12108static unsigned int reg_dead_regno, reg_dead_endregno;
12109static int reg_dead_flag;
12110
12111/* Function called via note_stores from reg_dead_at_p.
12112
12113   If DEST is within [reg_dead_regno, reg_dead_endregno), set
12114   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
12115
12116static void
12117reg_dead_at_p_1 (dest, x, data)
12118     rtx dest;
12119     rtx x;
12120     void *data ATTRIBUTE_UNUSED;
12121{
12122  unsigned int regno, endregno;
12123
12124  if (GET_CODE (dest) != REG)
12125    return;
12126
12127  regno = REGNO (dest);
12128  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
12129		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
12130
12131  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12132    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12133}
12134
12135/* Return nonzero if REG is known to be dead at INSN.
12136
12137   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12138   referencing REG, it is dead.  If we hit a SET referencing REG, it is
12139   live.  Otherwise, see if it is live or dead at the start of the basic
12140   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12141   must be assumed to be always live.  */
12142
12143static int
12144reg_dead_at_p (reg, insn)
12145     rtx reg;
12146     rtx insn;
12147{
12148  basic_block block;
12149  unsigned int i;
12150
12151  /* Set variables for reg_dead_at_p_1.  */
12152  reg_dead_regno = REGNO (reg);
12153  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
12154					? HARD_REGNO_NREGS (reg_dead_regno,
12155							    GET_MODE (reg))
12156					: 1);
12157
12158  reg_dead_flag = 0;
12159
12160  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
12161  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12162    {
12163      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12164	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
12165	  return 0;
12166    }
12167
12168  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
12169     beginning of function.  */
12170  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
12171       insn = prev_nonnote_insn (insn))
12172    {
12173      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12174      if (reg_dead_flag)
12175	return reg_dead_flag == 1 ? 1 : 0;
12176
12177      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12178	return 1;
12179    }
12180
12181  /* Get the basic block that we were in.  */
12182  if (insn == 0)
12183    block = ENTRY_BLOCK_PTR->next_bb;
12184  else
12185    {
12186      FOR_EACH_BB (block)
12187	if (insn == block->head)
12188	  break;
12189
12190      if (block == EXIT_BLOCK_PTR)
12191	return 0;
12192    }
12193
12194  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12195    if (REGNO_REG_SET_P (block->global_live_at_start, i))
12196      return 0;
12197
12198  return 1;
12199}
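
/* Roughly: asking whether (reg:SI 2) is dead at INSN scans backwards
   from INSN; a REG_DEAD note or a (clobber (reg:SI 2)) means dead
   (return 1), a (set (reg:SI 2) ...) means live (return 0), and if
   the scan runs off the top of the basic block the answer comes from
   global_live_at_start for that block.  */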
12200
12201/* Note hard registers in X that are used.  This code is similar to
12202   that in flow.c, but much simpler since we don't care about pseudos.  */
12203
12204static void
12205mark_used_regs_combine (x)
12206     rtx x;
12207{
12208  RTX_CODE code = GET_CODE (x);
12209  unsigned int regno;
12210  int i;
12211
12212  switch (code)
12213    {
12214    case LABEL_REF:
12215    case SYMBOL_REF:
12216    case CONST_INT:
12217    case CONST:
12218    case CONST_DOUBLE:
12219    case CONST_VECTOR:
12220    case PC:
12221    case ADDR_VEC:
12222    case ADDR_DIFF_VEC:
12223    case ASM_INPUT:
12224#ifdef HAVE_cc0
12225    /* CC0 must die in the insn after it is set, so we don't need to take
12226       special note of it here.  */
12227    case CC0:
12228#endif
12229      return;
12230
12231    case CLOBBER:
12232      /* If we are clobbering a MEM, mark any hard registers inside the
12233	 address as used.  */
12234      if (GET_CODE (XEXP (x, 0)) == MEM)
12235	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12236      return;
12237
12238    case REG:
12239      regno = REGNO (x);
12240      /* A hard reg in a wide mode may really be multiple registers.
12241	 If so, mark all of them just like the first.  */
12242      if (regno < FIRST_PSEUDO_REGISTER)
12243	{
12244	  unsigned int endregno, r;
12245
12246	  /* None of this applies to the stack, frame or arg pointers.  */
12247	  if (regno == STACK_POINTER_REGNUM
12248#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12249	      || regno == HARD_FRAME_POINTER_REGNUM
12250#endif
12251#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12252	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12253#endif
12254	      || regno == FRAME_POINTER_REGNUM)
12255	    return;
12256
12257	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12258	  for (r = regno; r < endregno; r++)
12259	    SET_HARD_REG_BIT (newpat_used_regs, r);
12260	}
12261      return;
12262
12263    case SET:
12264      {
12265	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12266	   the address.  */
12267	rtx testreg = SET_DEST (x);
12268
12269	while (GET_CODE (testreg) == SUBREG
12270	       || GET_CODE (testreg) == ZERO_EXTRACT
12271	       || GET_CODE (testreg) == SIGN_EXTRACT
12272	       || GET_CODE (testreg) == STRICT_LOW_PART)
12273	  testreg = XEXP (testreg, 0);
12274
12275	if (GET_CODE (testreg) == MEM)
12276	  mark_used_regs_combine (XEXP (testreg, 0));
12277
12278	mark_used_regs_combine (SET_SRC (x));
12279      }
12280      return;
12281
12282    default:
12283      break;
12284    }
12285
12286  /* Recursively scan the operands of this expression.  */
12287
12288  {
12289    const char *fmt = GET_RTX_FORMAT (code);
12290
12291    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12292      {
12293	if (fmt[i] == 'e')
12294	  mark_used_regs_combine (XEXP (x, i));
12295	else if (fmt[i] == 'E')
12296	  {
12297	    int j;
12298
12299	    for (j = 0; j < XVECLEN (x, i); j++)
12300	      mark_used_regs_combine (XVECEXP (x, i, j));
12301	  }
12302      }
12303  }
12304}
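
/* For example, on a 32-bit target where HARD_REGNO_NREGS (0, DImode)
   is 2, marking (reg:DI 0) sets bits 0 and 1 of newpat_used_regs,
   since the wide value occupies both hard registers.  */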
12305
12306/* Remove register number REGNO from the dead registers list of INSN.
12307
12308   Return the note used to record the death, if there was one.  */
12309
12310rtx
12311remove_death (regno, insn)
12312     unsigned int regno;
12313     rtx insn;
12314{
12315  rtx note = find_regno_note (insn, REG_DEAD, regno);
12316
12317  if (note)
12318    {
12319      REG_N_DEATHS (regno)--;
12320      remove_note (insn, note);
12321    }
12322
12323  return note;
12324}
12325
12326/* For each register (hardware or pseudo) used within expression X, if its
12327   death is in an instruction with cuid between FROM_CUID (inclusive) and
12328   TO_INSN (exclusive), put a REG_DEAD note for that register in the
12329   list headed by PNOTES.
12330
12331   That said, don't move registers killed by maybe_kill_insn.
12332
12333   This is done when X is being merged by combination into TO_INSN.  These
12334   notes will then be distributed as needed.  */
12335
12336static void
12337move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
12338     rtx x;
12339     rtx maybe_kill_insn;
12340     int from_cuid;
12341     rtx to_insn;
12342     rtx *pnotes;
12343{
12344  const char *fmt;
12345  int len, i;
12346  enum rtx_code code = GET_CODE (x);
12347
12348  if (code == REG)
12349    {
12350      unsigned int regno = REGNO (x);
12351      rtx where_dead = reg_last_death[regno];
12352      rtx before_dead, after_dead;
12353
12354      /* Don't move the register if it gets killed in between from and to.  */
12355      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12356	  && ! reg_referenced_p (x, maybe_kill_insn))
12357	return;
12358
12359      /* WHERE_DEAD could be a USE insn made by combine, so first we
12360	 make sure that we have insns with valid INSN_CUID values.  */
12361      before_dead = where_dead;
12362      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
12363	before_dead = PREV_INSN (before_dead);
12364
12365      after_dead = where_dead;
12366      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
12367	after_dead = NEXT_INSN (after_dead);
12368
12369      if (before_dead && after_dead
12370	  && INSN_CUID (before_dead) >= from_cuid
12371	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
12372	      || (where_dead != after_dead
12373		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
12374	{
12375	  rtx note = remove_death (regno, where_dead);
12376
12377	  /* It is possible for the call above to return 0.  This can occur
12378	     when reg_last_death points to I2 or I1 that we combined with.
12379	     In that case make a new note.
12380
12381	     We must also check for the case where X is a hard register
12382	     and NOTE is a death note for a range of hard registers
12383	     including X.  In that case, we must put REG_DEAD notes for
12384	     the remaining registers in place of NOTE.  */
12385
12386	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12387	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12388		  > GET_MODE_SIZE (GET_MODE (x))))
12389	    {
12390	      unsigned int deadregno = REGNO (XEXP (note, 0));
12391	      unsigned int deadend
12392		= (deadregno + HARD_REGNO_NREGS (deadregno,
12393						 GET_MODE (XEXP (note, 0))));
12394	      unsigned int ourend
12395		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12396	      unsigned int i;
12397
12398	      for (i = deadregno; i < deadend; i++)
12399		if (i < regno || i >= ourend)
12400		  REG_NOTES (where_dead)
12401		    = gen_rtx_EXPR_LIST (REG_DEAD,
12402					 regno_reg_rtx[i],
12403					 REG_NOTES (where_dead));
12404	    }
12405
12406	  /* If we didn't find any note, or if we found a REG_DEAD note that
12407	     covers only part of the given reg, and we have a multi-reg hard
12408	     register, then to be safe we must check for REG_DEAD notes
12409	     for each register other than the first.  They could have
12410	     their own REG_DEAD notes lying around.  */
12411	  else if ((note == 0
12412		    || (note != 0
12413			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12414			    < GET_MODE_SIZE (GET_MODE (x)))))
12415		   && regno < FIRST_PSEUDO_REGISTER
12416		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
12417	    {
12418	      unsigned int ourend
12419		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12420	      unsigned int i, offset;
12421	      rtx oldnotes = 0;
12422
12423	      if (note)
12424		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
12425	      else
12426		offset = 1;
12427
12428	      for (i = regno + offset; i < ourend; i++)
12429		move_deaths (regno_reg_rtx[i],
12430			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
12431	    }
12432
12433	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12434	    {
12435	      XEXP (note, 1) = *pnotes;
12436	      *pnotes = note;
12437	    }
12438	  else
12439	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
12440
12441	  REG_N_DEATHS (regno)++;
12442	}
12443
12444      return;
12445    }
12446
12447  else if (GET_CODE (x) == SET)
12448    {
12449      rtx dest = SET_DEST (x);
12450
12451      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12452
12453      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12454	 that accesses one word of a multi-word item, some
12455	 piece of every register used in the expression is used by
12456	 this insn, so remove any old death.  */
12457      /* ??? So why do we test for equality of the sizes?  */
12458
12459      if (GET_CODE (dest) == ZERO_EXTRACT
12460	  || GET_CODE (dest) == STRICT_LOW_PART
12461	  || (GET_CODE (dest) == SUBREG
12462	      && (((GET_MODE_SIZE (GET_MODE (dest))
12463		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12464		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12465		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12466	{
12467	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12468	  return;
12469	}
12470
12471      /* If this is some other SUBREG, we know it replaces the entire
12472	 value, so use that as the destination.  */
12473      if (GET_CODE (dest) == SUBREG)
12474	dest = SUBREG_REG (dest);
12475
12476      /* If this is a MEM, adjust deaths of anything used in the address.
12477	 For a REG (the only other possibility), the entire value is
12478	 being replaced so the old value is not used in this insn.  */
12479
12480      if (GET_CODE (dest) == MEM)
12481	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12482		     to_insn, pnotes);
12483      return;
12484    }
12485
12486  else if (GET_CODE (x) == CLOBBER)
12487    return;
12488
12489  len = GET_RTX_LENGTH (code);
12490  fmt = GET_RTX_FORMAT (code);
12491
12492  for (i = 0; i < len; i++)
12493    {
12494      if (fmt[i] == 'E')
12495	{
12496	  int j;
12497	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12498	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12499			 to_insn, pnotes);
12500	}
12501      else if (fmt[i] == 'e')
12502	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12503    }
12504}
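
/* For example, if (reg:SI 65) last died in an insn between FROM_CUID
   and TO_INSN, its REG_DEAD note is removed from that insn here and
   chained onto *PNOTES for distribute_notes to place, normally on
   TO_INSN.  The hard-register cases above handle a death note for a
   wider value, say (reg:DI 0), when only (reg:SI 0) is being merged,
   and the converse, by splitting the note or searching the remaining
   pieces individually.  */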
12505
12506/* Return 1 if X is the target of a bit-field assignment in BODY, the
12507   pattern of an insn.  X must be a REG.  */
12508
12509static int
12510reg_bitfield_target_p (x, body)
12511     rtx x;
12512     rtx body;
12513{
12514  int i;
12515
12516  if (GET_CODE (body) == SET)
12517    {
12518      rtx dest = SET_DEST (body);
12519      rtx target;
12520      unsigned int regno, tregno, endregno, endtregno;
12521
12522      if (GET_CODE (dest) == ZERO_EXTRACT)
12523	target = XEXP (dest, 0);
12524      else if (GET_CODE (dest) == STRICT_LOW_PART)
12525	target = SUBREG_REG (XEXP (dest, 0));
12526      else
12527	return 0;
12528
12529      if (GET_CODE (target) == SUBREG)
12530	target = SUBREG_REG (target);
12531
12532      if (GET_CODE (target) != REG)
12533	return 0;
12534
12535      tregno = REGNO (target), regno = REGNO (x);
12536      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12537	return target == x;
12538
12539      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12540      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12541
12542      return endregno > tregno && regno < endtregno;
12543    }
12544
12545  else if (GET_CODE (body) == PARALLEL)
12546    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12547      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12548	return 1;
12549
12550  return 0;
12551}
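
/* For example, given the BODY

	(set (zero_extract:SI (reg:SI 65) (const_int 8) (const_int 0))
	     (reg:SI 66))

   this returns 1 for (reg:SI 65): only a field of it is assigned, so
   the rest of the register remains live through the insn.  */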
12552
12553/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12554   as appropriate.  I3 and I2 are the insns resulting from the combination
12555   insns including FROM (I2 may be zero).
12556
12557   Each note in the list is either ignored or placed on some insns, depending
12558   on the type of note.  */
12559
12560static void
12561distribute_notes (notes, from_insn, i3, i2)
12562     rtx notes;
12563     rtx from_insn;
12564     rtx i3, i2;
12565{
12566  rtx note, next_note;
12567  rtx tem;
12568
12569  for (note = notes; note; note = next_note)
12570    {
12571      rtx place = 0, place2 = 0;
12572
12573      /* If this NOTE references a pseudo register, ensure it references
12574	 the latest copy of that register.  */
12575      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12576	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12577	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12578
12579      next_note = XEXP (note, 1);
12580      switch (REG_NOTE_KIND (note))
12581	{
12582	case REG_BR_PROB:
12583	case REG_BR_PRED:
12584	  /* Doesn't matter much where we put this, as long as it's somewhere.
12585	     It is preferable to keep these notes on branches, which is most
12586	     likely to be i3.  */
12587	  place = i3;
12588	  break;
12589
12590	case REG_VTABLE_REF:
12591	  /* ??? Should remain with *a particular* memory load.  Given the
12592	     nature of vtable data, the last insn seems relatively safe.  */
12593	  place = i3;
12594	  break;
12595
12596	case REG_NON_LOCAL_GOTO:
12597	  if (GET_CODE (i3) == JUMP_INSN)
12598	    place = i3;
12599	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
12600	    place = i2;
12601	  else
12602	    abort ();
12603	  break;
12604
12605	case REG_EH_REGION:
12606	  /* These notes must remain with the call or trapping instruction.  */
12607	  if (GET_CODE (i3) == CALL_INSN)
12608	    place = i3;
12609	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12610	    place = i2;
12611	  else if (flag_non_call_exceptions)
12612	    {
12613	      if (may_trap_p (i3))
12614		place = i3;
12615	      else if (i2 && may_trap_p (i2))
12616		place = i2;
12617	      /* ??? Otherwise assume we've combined things such that we
12618		 can now prove that the instructions can't trap.  Drop the
12619		 note in this case.  */
12620	    }
12621	  else
12622	    abort ();
12623	  break;
12624
12625	case REG_ALWAYS_RETURN:
12626	case REG_NORETURN:
12627	case REG_SETJMP:
12628	  /* These notes must remain with the call.  It should not be
12629	     possible for both I2 and I3 to be a call.  */
12630	  if (GET_CODE (i3) == CALL_INSN)
12631	    place = i3;
12632	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12633	    place = i2;
12634	  else
12635	    abort ();
12636	  break;
12637
12638	case REG_UNUSED:
12639	  /* Any clobbers for i3 may still exist, and so we must process
12640	     REG_UNUSED notes from that insn.
12641
12642	     Any clobbers from i2 or i1 can only exist if they were added by
12643	     recog_for_combine.  In that case, recog_for_combine created the
12644	     necessary REG_UNUSED notes.  Trying to keep any original
12645	     REG_UNUSED notes from these insns can cause incorrect output
12646	     if it is for the same register as the original i3 dest.
12647	     In that case, we will notice that the register is set in i3,
12648	     and then add a REG_UNUSED note for the destination of i3, which
12649	     is wrong.  However, it is possible to have REG_UNUSED notes from
12650	     i2 or i1 for registers that were both used and clobbered, so
12651	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12652	     notes.  */
12653
12654	  /* If this register is set or clobbered in I3, put the note there
12655	     unless there is one already.  */
12656	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12657	    {
12658	      if (from_insn != i3)
12659		break;
12660
12661	      if (! (GET_CODE (XEXP (note, 0)) == REG
12662		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12663		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12664		place = i3;
12665	    }
12666	  /* Otherwise, if this register is used by I3, then this register
12667	     now dies here, so we must put a REG_DEAD note here unless there
12668	     is one already.  */
12669	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12670		   && ! (GET_CODE (XEXP (note, 0)) == REG
12671			 ? find_regno_note (i3, REG_DEAD,
12672					    REGNO (XEXP (note, 0)))
12673			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12674	    {
12675	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12676	      place = i3;
12677	    }
12678	  break;
12679
12680	case REG_EQUAL:
12681	case REG_EQUIV:
12682	case REG_NOALIAS:
12683	  /* These notes say something about results of an insn.  We can
12684	     only support them if they used to be on I3 in which case they
12685	     remain on I3.  Otherwise they are ignored.
12686
12687	     If the note refers to an expression that is not a constant, we
12688	     must also ignore the note since we cannot tell whether the
12689	     equivalence is still true.  It might be possible to do
12690	     slightly better than this (we only have a problem if I2DEST
12691	     or I1DEST is present in the expression), but it doesn't
12692	     seem worth the trouble.  */
12693
12694	  if (from_insn == i3
12695	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12696	    place = i3;
12697	  break;
12698
12699	case REG_INC:
12700	case REG_NO_CONFLICT:
12701	  /* These notes say something about how a register is used.  They must
12702	     be present on any use of the register in I2 or I3.  */
12703	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12704	    place = i3;
12705
12706	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12707	    {
12708	      if (place)
12709		place2 = i2;
12710	      else
12711		place = i2;
12712	    }
12713	  break;
12714
12715	case REG_LABEL:
12716	  /* This can show up in several ways -- either directly in the
12717	     pattern, or hidden off in the constant pool with (or without?)
12718	     a REG_EQUAL note.  */
12719	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12720	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12721	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12722		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12723		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12724	    place = i3;
12725
12726	  if (i2
12727	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12728		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12729		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12730		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12731	    {
12732	      if (place)
12733		place2 = i2;
12734	      else
12735		place = i2;
12736	    }
12737
12738	  /* Don't attach REG_LABEL note to a JUMP_INSN which has
12739	     JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
12740	  if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12741	    {
12742	      if (JUMP_LABEL (place) != XEXP (note, 0))
12743		abort ();
12744	      if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12745		LABEL_NUSES (JUMP_LABEL (place))--;
12746	      place = 0;
12747	    }
12748	  if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12749	    {
12750	      if (JUMP_LABEL (place2) != XEXP (note, 0))
12751		abort ();
12752	      if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12753		LABEL_NUSES (JUMP_LABEL (place2))--;
12754	      place2 = 0;
12755	    }
12756	  break;
12757
12758	case REG_NONNEG:
12759	case REG_WAS_0:
12760	  /* These notes say something about the value of a register prior
12761	     to the execution of an insn.  It is too much trouble to see
12762	     if the note is still correct in all situations.  It is better
12763	     to simply delete it.  */
12764	  break;
12765
12766	case REG_RETVAL:
12767	  /* If the insn previously containing this note still exists,
12768	     put it back where it was.  Otherwise move it to the previous
12769	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12770	  if (GET_CODE (from_insn) != NOTE)
12771	    place = from_insn;
12772	  else
12773	    {
12774	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12775	      place = prev_real_insn (from_insn);
12776	      if (tem && place)
12777		XEXP (tem, 0) = place;
12778	      /* If we're deleting the last remaining instruction of a
12779		 libcall sequence, don't add the notes.  */
12780	      else if (XEXP (note, 0) == from_insn)
12781		tem = place = 0;
12782	    }
12783	  break;
12784
12785	case REG_LIBCALL:
12786	  /* This is handled similarly to REG_RETVAL.  */
12787	  if (GET_CODE (from_insn) != NOTE)
12788	    place = from_insn;
12789	  else
12790	    {
12791	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12792	      place = next_real_insn (from_insn);
12793	      if (tem && place)
12794		XEXP (tem, 0) = place;
12795	      /* If we're deleting the last remaining instruction of a
12796		 libcall sequence, don't add the notes.  */
12797	      else if (XEXP (note, 0) == from_insn)
12798		tem = place = 0;
12799	    }
12800	  break;
12801
12802	case REG_DEAD:
12803	  /* If the register is used as an input in I3, it dies there.
12804	     Similarly for I2, if it is nonzero and adjacent to I3.
12805
12806	     If the register is not used as an input in either I3 or I2
12807	     and it is not one of the registers we were supposed to eliminate,
12808	     there are two possibilities.  We might have a non-adjacent I2
12809	     or we might have somehow eliminated an additional register
12810	     from a computation.  For example, we might have had A & B where
12811	     we discover that B will always be zero.  In this case we will
12812	     eliminate the reference to A.
12813
12814	     In both cases, we must search to see if we can find a previous
12815	     use of A and put the death note there.  */
12816
12817	  if (from_insn
12818	      && GET_CODE (from_insn) == CALL_INSN
12819	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12820	    place = from_insn;
12821	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12822	    place = i3;
12823	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
12824		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12825	    place = i2;
12826
12827	  if (place == 0)
12828	    {
12829	      basic_block bb = this_basic_block;
12830
12831	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12832		{
12833		  if (! INSN_P (tem))
12834		    {
12835		      if (tem == bb->head)
12836			break;
12837		      continue;
12838		    }
12839
12840		  /* If the register is being set at TEM, see if that is all
12841		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12842		     into a REG_UNUSED note instead.  Don't delete sets to
12843		     global register vars.  */
12844		  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
12845		       || !global_regs[REGNO (XEXP (note, 0))])
12846		      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
12847		    {
12848		      rtx set = single_set (tem);
12849		      rtx inner_dest = 0;
12850#ifdef HAVE_cc0
12851		      rtx cc0_setter = NULL_RTX;
12852#endif
12853
12854		      if (set != 0)
12855			for (inner_dest = SET_DEST (set);
12856			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12857			      || GET_CODE (inner_dest) == SUBREG
12858			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12859			     inner_dest = XEXP (inner_dest, 0))
12860			  ;
12861
12862		      /* Verify that it was the set, and not a clobber that
12863			 modified the register.
12864
12865			 CC0 targets must be careful to maintain setter/user
12866			 pairs.  If we cannot delete the setter due to side
12867			 effects, mark the user with an UNUSED note instead
12868			 of deleting it.  */
12869
12870		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12871			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12872#ifdef HAVE_cc0
12873			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12874			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12875				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12876#endif
12877			  )
12878			{
12879			  /* Move the notes and links of TEM elsewhere.
12880			     This might delete other dead insns recursively.
12881			     First set the pattern to something that won't use
12882			     any register.  */
12883
12884			  PATTERN (tem) = pc_rtx;
12885
12886			  distribute_notes (REG_NOTES (tem), tem, tem,
12887					    NULL_RTX);
12888			  distribute_links (LOG_LINKS (tem));
12889
12890			  PUT_CODE (tem, NOTE);
12891			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12892			  NOTE_SOURCE_FILE (tem) = 0;
12893
12894#ifdef HAVE_cc0
12895			  /* Delete the setter too.  */
12896			  if (cc0_setter)
12897			    {
12898			      PATTERN (cc0_setter) = pc_rtx;
12899
12900			      distribute_notes (REG_NOTES (cc0_setter),
12901						cc0_setter, cc0_setter,
12902						NULL_RTX);
12903			      distribute_links (LOG_LINKS (cc0_setter));
12904
12905			      PUT_CODE (cc0_setter, NOTE);
12906			      NOTE_LINE_NUMBER (cc0_setter)
12907				= NOTE_INSN_DELETED;
12908			      NOTE_SOURCE_FILE (cc0_setter) = 0;
12909			    }
12910#endif
12911			}
12912		      /* If the register is both set and used here, put the
12913			 REG_DEAD note here, but place a REG_UNUSED note
12914			 here too unless there already is one.  */
12915		      else if (reg_referenced_p (XEXP (note, 0),
12916						 PATTERN (tem)))
12917			{
12918			  place = tem;
12919
12920			  if (! find_regno_note (tem, REG_UNUSED,
12921						 REGNO (XEXP (note, 0))))
12922			    REG_NOTES (tem)
12923			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12924						   REG_NOTES (tem));
12925			}
12926		      else
12927			{
12928			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12929
12930			  /* If there isn't already a REG_UNUSED note, put one
12931			      here.  */
12932			  if (! find_regno_note (tem, REG_UNUSED,
12933						 REGNO (XEXP (note, 0))))
12934			    place = tem;
12935			  break;
12936			}
12937		    }
12938		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12939			   || (GET_CODE (tem) == CALL_INSN
12940			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12941		    {
12942		      place = tem;
12943
12944		      /* If we are doing a 3->2 combination, and we have a
12945			 register which formerly died in i3 and was not used
12946			 by i2, which now no longer dies in i3 and is used in
12947			 i2 but does not die in i2, and place is between i2
12948			 and i3, then we may need to move a link from place to
12949			 i2.  */
12950		      if (i2 && INSN_UID (place) <= max_uid_cuid
12951			  && INSN_CUID (place) > INSN_CUID (i2)
12952			  && from_insn
12953			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12954			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12955			{
12956			  rtx links = LOG_LINKS (place);
12957			  LOG_LINKS (place) = 0;
12958			  distribute_links (links);
12959			}
12960		      break;
12961		    }
12962
12963		  if (tem == bb->head)
12964		    break;
12965		}
12966
12967	      /* We haven't found an insn for the death note and it
12968		 is still a REG_DEAD note, but we have hit the beginning
12969		 of the block.  If the existing life info says the reg
12970		 was dead, there's nothing left to do.  Otherwise, we'll
12971		 need to do a global life update after combine.  */
12972	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12973		  && REGNO_REG_SET_P (bb->global_live_at_start,
12974				      REGNO (XEXP (note, 0))))
12975		{
12976		  SET_BIT (refresh_blocks, this_basic_block->index);
12977		  need_refresh = 1;
12978		}
12979	    }
12980
12981	  /* If the register is set or already dead at PLACE, we needn't do
12982	     anything with this note if it is still a REG_DEAD note.
12983	     We get here if it is set at all, not only if it is totally
12984	     replaced, which is what `dead_or_set_p' checks, so we must also
12985	     check for it being set partially.  */
12986
12987	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12988	    {
12989	      unsigned int regno = REGNO (XEXP (note, 0));
12990
12991	      /* Similarly, if the instruction on which we want to place
12992		 the note is a noop, we'll need to do a global live update
12993		 after we remove it in delete_noop_moves.  */
12994	      if (noop_move_p (place))
12995		{
12996		  SET_BIT (refresh_blocks, this_basic_block->index);
12997		  need_refresh = 1;
12998		}
12999
13000	      if (dead_or_set_p (place, XEXP (note, 0))
13001		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
13002		{
13003		  /* Unless the register previously died in PLACE, clear
13004		     reg_last_death.  [I no longer understand why this is
13005		     being done.] */
13006		  if (reg_last_death[regno] != place)
13007		    reg_last_death[regno] = 0;
13008		  place = 0;
13009		}
13010	      else
13011		reg_last_death[regno] = place;
13012
13013	      /* If this is a death note for a hard reg that is occupying
13014		 multiple registers, ensure that we are still using all
13015		 parts of the object.  If we find a piece of the object
13016		 that is unused, we must arrange for an appropriate REG_DEAD
13017		 note to be added for it.  However, we can't just emit a USE
13018		 and tag the note to it, since the register might actually
13019		 be dead; so we recurse, and the recursive call then finds
13020		 the previous insn that used this register.  */
13021
13022	      if (place && regno < FIRST_PSEUDO_REGISTER
13023		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
13024		{
13025		  unsigned int endregno
13026		    = regno + HARD_REGNO_NREGS (regno,
13027						GET_MODE (XEXP (note, 0)));
13028		  int all_used = 1;
13029		  unsigned int i;
13030
13031		  for (i = regno; i < endregno; i++)
13032		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
13033			 && ! find_regno_fusage (place, USE, i))
13034			|| dead_or_set_regno_p (place, i))
13035		      all_used = 0;
13036
13037		  if (! all_used)
13038		    {
13039		      /* Put only REG_DEAD notes for pieces that are
13040			 not already dead or set.  */
13041
13042		      for (i = regno; i < endregno;
13043			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
13044			{
13045			  rtx piece = regno_reg_rtx[i];
13046			  basic_block bb = this_basic_block;
13047
13048			  if (! dead_or_set_p (place, piece)
13049			      && ! reg_bitfield_target_p (piece,
13050							  PATTERN (place)))
13051			    {
13052			      rtx new_note
13053				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
13054
13055			      distribute_notes (new_note, place, place,
13056						NULL_RTX);
13057			    }
13058			  else if (! refers_to_regno_p (i, i + 1,
13059							PATTERN (place), 0)
13060				   && ! find_regno_fusage (place, USE, i))
13061			    for (tem = PREV_INSN (place); ;
13062				 tem = PREV_INSN (tem))
13063			      {
13064				if (! INSN_P (tem))
13065				  {
13066				    if (tem == bb->head)
13067				      {
13068					SET_BIT (refresh_blocks,
13069						 this_basic_block->index);
13070					need_refresh = 1;
13071					break;
13072				      }
13073				    continue;
13074				  }
13075				if (dead_or_set_p (tem, piece)
13076				    || reg_bitfield_target_p (piece,
13077							      PATTERN (tem)))
13078				  {
13079				    REG_NOTES (tem)
13080				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
13081							   REG_NOTES (tem));
13082				    break;
13083				  }
13084			      }
13085
13086			}
13087
13088		      place = 0;
13089		    }
13090		}
13091	    }
13092	  break;
13093
13094	default:
13095	  /* Any other notes should not be present at this point in the
13096	     compilation.  */
13097	  abort ();
13098	}
13099
13100      if (place)
13101	{
13102	  XEXP (note, 1) = REG_NOTES (place);
13103	  REG_NOTES (place) = note;
13104	}
13105      else if ((REG_NOTE_KIND (note) == REG_DEAD
13106		|| REG_NOTE_KIND (note) == REG_UNUSED)
13107	       && GET_CODE (XEXP (note, 0)) == REG)
13108	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
13109
13110      if (place2)
13111	{
13112	  if ((REG_NOTE_KIND (note) == REG_DEAD
13113	       || REG_NOTE_KIND (note) == REG_UNUSED)
13114	      && GET_CODE (XEXP (note, 0)) == REG)
13115	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
13116
13117	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
13118					       REG_NOTE_KIND (note),
13119					       XEXP (note, 0),
13120					       REG_NOTES (place2));
13121	}
13122    }
13123}
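
/* For example, when I1 and I2 have been merged into I3, a REG_DEAD
   note for (reg:SI 65) that used to sit on I2 comes through here: if
   I3 still uses register 65 the note moves to I3; if the register was
   optimized out of the combined pattern we scan backwards for its
   last remaining use; and if no use survives, the note is dropped and
   REG_N_DEATHS is decremented in the bookkeeping at the end.  */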
13124
13125/* Similarly to above, distribute the LOG_LINKS that used to be present on
13126   I3, I2, and I1 to new locations.  This is also called to add a link
13127   pointing at I3 when I3's destination is changed.  */
13128
13129static void
13130distribute_links (links)
13131     rtx links;
13132{
13133  rtx link, next_link;
13134
13135  for (link = links; link; link = next_link)
13136    {
13137      rtx place = 0;
13138      rtx insn;
13139      rtx set, reg;
13140
13141      next_link = XEXP (link, 1);
13142
13143      /* If the insn that this link points to is a NOTE or isn't a single
13144	 set, ignore it.  In the latter case, it isn't clear what we
13145	 can do other than ignore the link, since we can't tell which
13146	 register it was for.  Such links wouldn't be used by combine
13147	 anyway.
13148
13149	 It is not possible for the destination of the target of the link to
13150	 have been changed by combine.  The only way that could happen is if
13151	 we were to replace I3, I2, and I1 by I3 and I2.  But in that case the
13152	 destination of I2 also remains unchanged.  */
13153
13154      if (GET_CODE (XEXP (link, 0)) == NOTE
13155	  || (set = single_set (XEXP (link, 0))) == 0)
13156	continue;
13157
13158      reg = SET_DEST (set);
13159      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
13160	     || GET_CODE (reg) == SIGN_EXTRACT
13161	     || GET_CODE (reg) == STRICT_LOW_PART)
13162	reg = XEXP (reg, 0);
13163
13164      /* A LOG_LINK is defined as being placed on the first insn that uses
13165	 a register and points to the insn that sets the register.  Start
13166	 searching at the next insn after the target of the link and stop
13167	 when we reach a set of the register or the end of the basic block.
13168
13169	 Note that this correctly handles the link that used to point from
13170	 I3 to I2.  Also note that not much searching is typically done here
13171	 since most links don't point very far away.  */
13172
13173      for (insn = NEXT_INSN (XEXP (link, 0));
13174	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
13175		     || this_basic_block->next_bb->head != insn));
13176	   insn = NEXT_INSN (insn))
13177	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
13178	  {
13179	    if (reg_referenced_p (reg, PATTERN (insn)))
13180	      place = insn;
13181	    break;
13182	  }
13183	else if (GET_CODE (insn) == CALL_INSN
13184		 && find_reg_fusage (insn, USE, reg))
13185	  {
13186	    place = insn;
13187	    break;
13188	  }
13189
13190      /* If we found a place to put the link, place it there unless there
13191	 is already a link to the same insn as LINK at that point.  */
13192
13193      if (place)
13194	{
13195	  rtx link2;
13196
13197	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
13198	    if (XEXP (link2, 0) == XEXP (link, 0))
13199	      break;
13200
13201	  if (link2 == 0)
13202	    {
13203	      XEXP (link, 1) = LOG_LINKS (place);
13204	      LOG_LINKS (place) = link;
13205
13206	      /* Set added_links_insn to the earliest insn we added a
13207		 link to.  */
13208	      if (added_links_insn == 0
13209		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
13210		added_links_insn = place;
13211	    }
13212	}
13213    }
13214}
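
/* For example, suppose I2 carried a link to an earlier insn A that
   sets (reg:SI 65), and I2 has just been absorbed into I3.  The link
   is re-homed here: starting after A, the first surviving insn that
   uses register 65 receives the link, so a later combine attempt on
   that insn can still try merging it with A.  */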
13215
13216/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
13217
13218static int
13219insn_cuid (insn)
13220     rtx insn;
13221{
13222  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
13223	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
13224    insn = NEXT_INSN (insn);
13225
13226  if (INSN_UID (insn) > max_uid_cuid)
13227    abort ();
13228
13229  return INSN_CUID (insn);
13230}
13231
13232void
13233dump_combine_stats (file)
13234     FILE *file;
13235{
13236  fnotice
13237    (file,
13238     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13239     combine_attempts, combine_merges, combine_extras, combine_successes);
13240}
13241
13242void
13243dump_combine_total_stats (file)
13244     FILE *file;
13245{
13246  fnotice
13247    (file,
13248     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13249     total_attempts, total_merges, total_extras, total_successes);
13250}
13251