/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  They are not needed
   because the insn that sets CC0 is always immediately before the insn
   that tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn explicitly
   using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
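
/* As an illustration (a hypothetical example, not taken from any
   particular target), given the linked pair

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (reg:SI 98))

   combine substitutes the PLUS for the use of register 100, producing

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (reg:SI 98))

   which is installed if the machine description recognizes it, after
   which the first insn can be deleted (assuming register 100 is then
   dead).  */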

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
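
/* For instance, with BITS_PER_WORD == HOST_BITS_PER_WIDE_INT == 64,
   evaluating "(val) << 64" directly would be undefined; the macro
   instead computes "(val << 63) << 1", which is well defined for the
   unsigned type and yields the expected result of zero.  (A worked
   example added for clarity.)  */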

#define nonzero_bits(X, M) \
  cached_nonzero_bits (X, M, NULL_RTX, VOIDmode, 0)

#define num_sign_bit_copies(X, M) \
  cached_num_sign_bit_copies (X, M, NULL_RTX, VOIDmode, 0)

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set nonzero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */
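
/* For instance (an illustrative case, following the byte-load example
   above): an SImode pseudo whose every assignment is a zero-extending
   QImode load would get an entry of 0xff here, recording that bits 8
   and above are known to be zero.  */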

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, all chained through
   undobuf.undos.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((int *, int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p	PARAMS ((rtx));
static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PARAMS ((rtx));
static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv	PARAMS ((rtx));
static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
static void undo_all		PARAMS ((void));
static void undo_commit		PARAMS ((void));
static rtx *find_split_point	PARAMS ((rtx *, rtx));
static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PARAMS ((rtx));
static rtx simplify_set		PARAMS ((rtx));
static rtx simplify_logical	PARAMS ((rtx, int));
static rtx expand_compound_operation  PARAMS ((rtx));
static rtx expand_field_assignment  PARAMS ((rtx));
static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
					 rtx, unsigned HOST_WIDE_INT, int,
					 int, int));
static rtx extract_left_shift	PARAMS ((rtx, int));
static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *));
static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
					 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment  PARAMS ((rtx));
static rtx apply_distributive_law  PARAMS ((rtx));
static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
					    unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT cached_nonzero_bits
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits1
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode,
					 unsigned HOST_WIDE_INT));
static unsigned int cached_num_sign_bit_copies
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode, unsigned int));
static unsigned int num_sign_bit_copies1
				PARAMS ((rtx, enum machine_mode, rtx,
					 enum machine_mode, unsigned int));
static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
					 enum rtx_code, HOST_WIDE_INT,
					 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
static void adjust_for_new_dest PARAMS ((rtx));

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
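
/* For example (an illustrative use, not a new requirement): a
   simplification that replaces the source of a SET would record the
   change as

	SUBST (SET_SRC (x), newsrc);

   so that undo_all can restore the original rtl if the tentative
   combination is later rejected.  combinable_i3pat below records its
   rewrite of a pattern element with SUBST in just this way.  */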

/* Similar to SUBST, but NEWVAL is an int expression.  Note that this
   cannot be used to substitute a full HOST_WIDE_INT value (including
   the contents of a CONST_INT), since HOST_WIDE_INT may be wider than
   int.  */

static void
do_SUBST_INT (into, newval)
     int *into, newval;
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = this_basic_block->head;
           insn != NEXT_INSN (this_basic_block->end);
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
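
/* For example (illustrative): if every assignment to an SImode pseudo
   is a sign-extension from HImode, then at least 17 of its 32 bits
   (the top 16 plus the HImode sign bit itself) are copies of the sign
   bit, so its reg_sign_bit_copies entry would be at least 17.  */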

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check whether PAT is an insn, or a part of one, used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem arises if I3 modifies its output, as opposed to replacing it
   entirely: we can't allow the output to contain I2DEST or I1DEST, since
   doing so would produce an insn that is not equivalent to the original
   insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
1431
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or a division.  We don't count multiplications by powers of two here.  */
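/* For example, (plus:SI (udiv:SI (reg:SI 65) (reg:SI 66)) (reg:SI 67))
   contains a division, so we return 1 for it, while
   (mult:SI (reg:SI 65) (const_int 8)) is a multiplication by a power of
   two, for which we return 0.  (Illustrative RTL; the register numbers
   are arbitrary.)  */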
1434
1435static int
1436contains_muldiv (x)
1437     rtx x;
1438{
1439  switch (GET_CODE (x))
1440    {
1441    case MOD:  case DIV:  case UMOD:  case UDIV:
1442      return 1;
1443
1444    case MULT:
1445      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
1446		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1447    default:
1448      switch (GET_RTX_CLASS (GET_CODE (x)))
1449	{
1450	case 'c':  case '<':  case '2':
1451	  return contains_muldiv (XEXP (x, 0))
1452	    || contains_muldiv (XEXP (x, 1));
1453
1454	case '1':
1455	  return contains_muldiv (XEXP (x, 0));
1456
1457	default:
1458	  return 0;
1459	}
1460    }
1461}
1462
1463/* Determine whether INSN can be used in a combination.  Return nonzero if
1464   not.  This is used in try_combine to detect early some cases where we
1465   can't perform combinations.  */
1466
1467static int
1468cant_combine_insn_p (insn)
1469     rtx insn;
1470{
1471  rtx set;
1472  rtx src, dest;
1473
1474  /* If this isn't really an insn, we can't do anything.
1475     This can occur when flow deletes an insn that it has merged into an
1476     auto-increment address.  */
1477  if (! INSN_P (insn))
1478    return 1;
1479
1480  /* Never combine loads and stores involving hard regs.  The register
1481     allocator can usually handle such reg-reg moves by tying.  If we allow
1482     the combiner to make substitutions of hard regs, we risk aborting in
1483     reload on machines that have SMALL_REGISTER_CLASSES.
1484     As an exception, we allow combinations involving fixed regs; these are
1485     not available to the register allocator so there's no risk involved.  */
1486
1487  set = single_set (insn);
1488  if (! set)
1489    return 0;
1490  src = SET_SRC (set);
1491  dest = SET_DEST (set);
1492  if (GET_CODE (src) == SUBREG)
1493    src = SUBREG_REG (src);
1494  if (GET_CODE (dest) == SUBREG)
1495    dest = SUBREG_REG (dest);
1496  if (REG_P (src) && REG_P (dest)
1497      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
1498	   && ! fixed_regs[REGNO (src)])
1499	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
1500	      && ! fixed_regs[REGNO (dest)])))
1501    return 1;
1502
1503  return 0;
1504}
1505
1506/* Adjust INSN after we made a change to its destination.
1507
1508   Changing the destination can invalidate notes that say something about
1509   the results of the insn and a LOG_LINK pointing to the insn.  */
1510
1511static void
1512adjust_for_new_dest (insn)
1513     rtx insn;
1514{
1515  rtx *loc;
1516
1517  /* For notes, be conservative and simply remove them.  */
1518  loc = &REG_NOTES (insn);
1519  while (*loc)
1520    {
1521      enum reg_note kind = REG_NOTE_KIND (*loc);
1522      if (kind == REG_EQUAL || kind == REG_EQUIV)
1523	*loc = XEXP (*loc, 1);
1524      else
1525	loc = &XEXP (*loc, 1);
1526    }
1527
1528  /* The new insn will have a destination that was previously the destination
1529     of an insn just above it.  Call distribute_links to make a LOG_LINK from
1530     the next use of that destination.  */
1531  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
1532}
1533
1534/* Try to combine the insns I1 and I2 into I3.
1535   Here I1 and I2 appear earlier than I3.
1536   I1 can be zero; then we combine just I2 into I3.
1537
1538   If we are combining three insns and the resulting insn is not recognized,
1539   try splitting it into two insns.  If that happens, I2 and I3 are retained
1540   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
1541   are pseudo-deleted.
1542
1543   Return 0 if the combination does not work.  Then nothing is changed.
1544   If we did the combination, return the insn at which combine should
1545   resume scanning.
1546
1547   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
1548   new direct jump instruction.  */
1549
1550static rtx
1551try_combine (i3, i2, i1, new_direct_jump_p)
1552     rtx i3, i2, i1;
1553     int *new_direct_jump_p;
1554{
1555  /* New patterns for I3 and I2, respectively.  */
1556  rtx newpat, newi2pat = 0;
1557  int substed_i2 = 0, substed_i1 = 0;
1558  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
1559  int added_sets_1, added_sets_2;
1560  /* Total number of SETs to put into I3.  */
1561  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
1563  int i2_is_used;
1564  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
1565  int insn_code_number, i2_code_number = 0, other_code_number = 0;
1566  /* Contains I3 if the destination of I3 is used in its source, which means
1567     that the old life of I3 is being killed.  If that usage is placed into
1568     I2 and not in I3, a REG_DEAD note must be made.  */
1569  rtx i3dest_killed = 0;
1570  /* SET_DEST and SET_SRC of I2 and I1.  */
1571  rtx i2dest, i2src, i1dest = 0, i1src = 0;
1572  /* PATTERN (I2), or a copy of it in certain cases.  */
1573  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
1575  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1576  int i1_feeds_i3 = 0;
1577  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1578  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
1580  int i3_subst_into_i2 = 0;
  /* Nonzero if the source of I1, I2 or I3 is a MULT operation.  */
1582  int have_mult = 0;
1583
1584  int maxreg;
1585  rtx temp;
1586  rtx link;
1587  int i;
1588
1589  /* Exit early if one of the insns involved can't be used for
1590     combinations.  */
1591  if (cant_combine_insn_p (i3)
1592      || cant_combine_insn_p (i2)
1593      || (i1 && cant_combine_insn_p (i1))
1594      /* We also can't do anything if I3 has a
1595	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1596	 libcall.  */
1597#if 0
1598      /* ??? This gives worse code, and appears to be unnecessary, since no
1599	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
1600      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1601#endif
1602      )
1603    return 0;
1604
1605  combine_attempts++;
1606  undobuf.other_insn = 0;
1607
1608  /* Reset the hard register usage information.  */
1609  CLEAR_HARD_REG_SET (newpat_used_regs);
1610
1611  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1612     code below, set I1 to be the earlier of the two insns.  */
1613  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1614    temp = i1, i1 = i2, i2 = temp;
1615
1616  added_links_insn = 0;
1617
  /* First check for one important special case that the code below will
1619     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
1620     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1621     we may be able to replace that destination with the destination of I3.
1622     This occurs in the common code where we compute both a quotient and
1623     remainder into a structure, in which case we want to do the computation
1624     directly into the structure to avoid register-register copies.
1625
     Note that this case handles both multiple sets in I2 and also
     cases where I2's PARALLEL contains a number of CLOBBERs in addition
     to its SETs.
1628
1629     We make very conservative checks below and only try to handle the
1630     most common cases of this.  For example, we only handle the case
1631     where I2 and I3 are adjacent to avoid making difficult register
1632     usage tests.  */
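  /* A hypothetical instance of the quotient-and-remainder case:
	I2: (parallel [(set (reg:SI 70) (div:SI (reg:SI 72) (reg:SI 73)))
		       (set (reg:SI 71) (mod:SI (reg:SI 72) (reg:SI 73)))])
	I3: (set (mem:SI ...) (reg:SI 71))
     If (reg:SI 71) dies in I3, we substitute the MEM for (reg:SI 71) in
     I2's PARALLEL and make that PARALLEL the new body of I3.  */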
1633
1634  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1635      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1636      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1637      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1638      && GET_CODE (PATTERN (i2)) == PARALLEL
1639      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1640      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1641	 below would need to check what is inside (and reg_overlap_mentioned_p
1642	 doesn't support those codes anyway).  Don't allow those destinations;
1643	 the resulting insn isn't likely to be recognized anyway.  */
1644      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1645      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1646      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1647				    SET_DEST (PATTERN (i3)))
1648      && next_real_insn (i2) == i3)
1649    {
1650      rtx p2 = PATTERN (i2);
1651
1652      /* Make sure that the destination of I3,
1653	 which we are going to substitute into one output of I2,
1654	 is not used within another output of I2.  We must avoid making this:
1655	 (parallel [(set (mem (reg 69)) ...)
1656		    (set (reg 69) ...)])
1657	 which is not well-defined as to order of actions.
1658	 (Besides, reload can't handle output reloads for this.)
1659
1660	 The problem can also happen if the dest of I3 is a memory ref,
1661	 if another dest in I2 is an indirect memory ref.  */
1662      for (i = 0; i < XVECLEN (p2, 0); i++)
1663	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1664	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1665	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1666					SET_DEST (XVECEXP (p2, 0, i))))
1667	  break;
1668
1669      if (i == XVECLEN (p2, 0))
1670	for (i = 0; i < XVECLEN (p2, 0); i++)
1671	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1672	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1673	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1674	    {
1675	      combine_merges++;
1676
1677	      subst_insn = i3;
1678	      subst_low_cuid = INSN_CUID (i2);
1679
1680	      added_sets_2 = added_sets_1 = 0;
1681	      i2dest = SET_SRC (PATTERN (i3));
1682
1683	      /* Replace the dest in I2 with our dest and make the resulting
1684		 insn the new pattern for I3.  Then skip to where we
1685		 validate the pattern.  Everything was set up above.  */
1686	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1687		     SET_DEST (PATTERN (i3)));
1688
1689	      newpat = p2;
1690	      i3_subst_into_i2 = 1;
1691	      goto validate_replacement;
1692	    }
1693    }
1694
1695  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
1696     one of those words to another constant, merge them by making a new
1697     constant.  */
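  /* For instance, on a 32-bit-word target (a hypothetical example):
	I2: (set (reg:DI 70) (const_int 0))
	I3: (set (subreg:SI (reg:DI 70) 0) (const_int 5))
     can be merged by rewriting the constant so that the affected word
     is 5, leaving a single double-word SET.  */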
1698  if (i1 == 0
1699      && (temp = single_set (i2)) != 0
1700      && (GET_CODE (SET_SRC (temp)) == CONST_INT
1701	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
1702      && GET_CODE (SET_DEST (temp)) == REG
1703      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
1704      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
1705      && GET_CODE (PATTERN (i3)) == SET
1706      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
1707      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
1708      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
1709      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
1710      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
1711    {
1712      HOST_WIDE_INT lo, hi;
1713
1714      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
1715	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
1716      else
1717	{
1718	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
1719	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
1720	}
1721
1722      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
1723	{
1724	  /* We don't handle the case of the target word being wider
1725	     than a host wide int.  */
1726	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
1727	    abort ();
1728
1729	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
1730	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
1731		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1732	}
1733      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1734	hi = INTVAL (SET_SRC (PATTERN (i3)));
1735      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
1736	{
1737	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
1738			     >> (HOST_BITS_PER_WIDE_INT - 1));
1739
1740	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1741		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
1742	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
1743		 (INTVAL (SET_SRC (PATTERN (i3)))));
1744	  if (hi == sign)
1745	    hi = lo < 0 ? -1 : 0;
1746	}
1747      else
1748	/* We don't handle the case of the higher word not fitting
1749	   entirely in either hi or lo.  */
1750	abort ();
1751
1752      combine_merges++;
1753      subst_insn = i3;
1754      subst_low_cuid = INSN_CUID (i2);
1755      added_sets_2 = added_sets_1 = 0;
1756      i2dest = SET_DEST (temp);
1757
1758      SUBST (SET_SRC (temp),
1759	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));
1760
1761      newpat = PATTERN (i2);
1762      goto validate_replacement;
1763    }
1764
1765#ifndef HAVE_cc0
1766  /* If we have no I1 and I2 looks like:
1767	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1768		   (set Y OP)])
1769     make up a dummy I1 that is
1770	(set Y OP)
1771     and change I2 to be
1772        (set (reg:CC X) (compare:CC Y (const_int 0)))
1773
1774     (We can ignore any trailing CLOBBERs.)
1775
1776     This undoes a previous combination and allows us to match a branch-and-
1777     decrement insn.  */
1778
1779  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1780      && XVECLEN (PATTERN (i2), 0) >= 2
1781      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1782      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1783	  == MODE_CC)
1784      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1785      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1786      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1787      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1788      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1789		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1790    {
1791      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1792	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1793	  break;
1794
1795      if (i == 1)
1796	{
1797	  /* We make I1 with the same INSN_UID as I2.  This gives it
1798	     the same INSN_CUID for value tracking.  Our fake I1 will
1799	     never appear in the insn stream so giving it the same INSN_UID
1800	     as I2 will not cause a problem.  */
1801
1802	  subst_prev_insn = i1
1803	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1804			    BLOCK_FOR_INSN (i2), INSN_SCOPE (i2),
1805			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1806			    NULL_RTX);
1807
1808	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1809	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1810		 SET_DEST (PATTERN (i1)));
1811	}
1812    }
1813#endif
1814
1815  /* Verify that I2 and I1 are valid for combining.  */
1816  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1817      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1818    {
1819      undo_all ();
1820      return 0;
1821    }
1822
1823  /* Record whether I2DEST is used in I2SRC and similarly for the other
1824     cases.  Knowing this will help in register status updating below.  */
1825  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1826  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1827  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1828
1829  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1830     in I2SRC.  */
1831  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1832
1833  /* Ensure that I3's pattern can be the destination of combines.  */
1834  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1835			  i1 && i2dest_in_i1src && i1_feeds_i3,
1836			  &i3dest_killed))
1837    {
1838      undo_all ();
1839      return 0;
1840    }
1841
1842  /* See if any of the insns is a MULT operation.  Unless one is, we will
1843     reject a combination that is, since it must be slower.  Be conservative
1844     here.  */
1845  if (GET_CODE (i2src) == MULT
1846      || (i1 != 0 && GET_CODE (i1src) == MULT)
1847      || (GET_CODE (PATTERN (i3)) == SET
1848	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1849    have_mult = 1;
1850
1851  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1852     We used to do this EXCEPT in one case: I3 has a post-inc in an
1853     output operand.  However, that exception can give rise to insns like
1854	mov r3,(r3)+
1855     which is a famous insn on the PDP-11 where the value of r3 used as the
1856     source was model-dependent.  Avoid this sort of thing.  */
1857
1858#if 0
1859  if (!(GET_CODE (PATTERN (i3)) == SET
1860	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1861	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1862	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1863	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1864    /* It's not the exception.  */
1865#endif
1866#ifdef AUTO_INC_DEC
1867    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1868      if (REG_NOTE_KIND (link) == REG_INC
1869	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1870	      || (i1 != 0
1871		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1872	{
1873	  undo_all ();
1874	  return 0;
1875	}
1876#endif
1877
1878  /* See if the SETs in I1 or I2 need to be kept around in the merged
1879     instruction: whenever the value set there is still needed past I3.
1880     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1881
1882     For the SET in I1, we have two cases:  If I1 and I2 independently
1883     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1884     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1885     in I1 needs to be kept around unless I1DEST dies or is set in either
1886     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1887     I1DEST.  If so, we know I1 feeds into I2.  */
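  /* For instance (hypothetically), if I1 sets (reg 70), I2 uses (reg 70)
     to compute (reg 71), and I3 uses (reg 71), but (reg 70) is used again
     after I3, then (reg 70) neither dies nor is set in I2 or I3, so
     ADDED_SETS_1 will be nonzero and I1's SET must be preserved.  */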
1888
1889  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1890
1891  added_sets_1
1892    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1893	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1894
1895  /* If the set in I2 needs to be kept around, we must make a copy of
1896     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1897     PATTERN (I2), we are only substituting for the original I1DEST, not into
1898     an already-substituted copy.  This also prevents making self-referential
1899     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1900     I2DEST.  */
1901
1902  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1903	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1904	   : PATTERN (i2));
1905
1906  if (added_sets_2)
1907    i2pat = copy_rtx (i2pat);
1908
1909  combine_merges++;
1910
1911  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1912
1913  maxreg = max_reg_num ();
1914
1915  subst_insn = i3;
1916
1917  /* It is possible that the source of I2 or I1 may be performing an
1918     unneeded operation, such as a ZERO_EXTEND of something that is known
1919     to have the high part zero.  Handle that case by letting subst look at
1920     the innermost one of them.
1921
1922     Another way to do this would be to have a function that tries to
1923     simplify a single insn instead of merging two or more insns.  We don't
1924     do this because of the potential of infinite loops and because
1925     of the potential extra memory required.  However, doing it the way
1926     we are is a bit of a kludge and doesn't catch all cases.
1927
1928     But only do this if -fexpensive-optimizations since it slows things down
1929     and doesn't usually win.  */
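  /* For example (hypothetically), if I2SRC is
	(zero_extend:SI (subreg:QI (reg:SI 71) 0))
     and the value of (reg:SI 71) is already known to have all its high
     bits zero, the calls below let subst remove the redundant
     ZERO_EXTEND before the merge is attempted.  */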
1930
1931  if (flag_expensive_optimizations)
1932    {
1933      /* Pass pc_rtx so no substitutions are done, just simplifications.
1934	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
1936      if (i1)
1937	{
1938	  subst_low_cuid = INSN_CUID (i1);
1939	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1940	}
1941      else
1942	{
1943	  subst_low_cuid = INSN_CUID (i2);
1944	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1945	}
1946    }
1947
1948#ifndef HAVE_cc0
1949  /* Many machines that don't use CC0 have insns that can both perform an
1950     arithmetic operation and set the condition code.  These operations will
1951     be represented as a PARALLEL with the first element of the vector
1952     being a COMPARE of an arithmetic operation with the constant zero.
1953     The second element of the vector will set some pseudo to the result
1954     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1955     match such a pattern and so will generate an extra insn.   Here we test
1956     for this case, where both the comparison and the operation result are
1957     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1958     I2SRC.  Later we will make the PARALLEL that contains I2.  */
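  /* A sketch of the transformation (hypothetical RTL): if I2 is
	(set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
     and I3 is
	(set (reg:CC 17) (compare:CC (reg:SI 70) (const_int 0))),
     we rewrite I3's source here as
	(compare:CC (plus:SI (reg:SI 71) (reg:SI 72)) (const_int 0))
     and the PARALLEL built further down re-adds I2's SET beside it.  */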
1959
1960  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1961      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1962      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1963      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1964    {
1965#ifdef EXTRA_CC_MODES
1966      rtx *cc_use;
1967      enum machine_mode compare_mode;
1968#endif
1969
1970      newpat = PATTERN (i3);
1971      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1972
1973      i2_is_used = 1;
1974
1975#ifdef EXTRA_CC_MODES
1976      /* See if a COMPARE with the operand we substituted in should be done
1977	 with the mode that is currently being used.  If not, do the same
1978	 processing we do in `subst' for a SET; namely, if the destination
1979	 is used only once, try to replace it with a register of the proper
1980	 mode and also replace the COMPARE.  */
1981      if (undobuf.other_insn == 0
1982	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1983					&undobuf.other_insn))
1984	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1985					      i2src, const0_rtx))
1986	      != GET_MODE (SET_DEST (newpat))))
1987	{
1988	  unsigned int regno = REGNO (SET_DEST (newpat));
1989	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1990
1991	  if (regno < FIRST_PSEUDO_REGISTER
1992	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1993		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1994	    {
1995	      if (regno >= FIRST_PSEUDO_REGISTER)
1996		SUBST (regno_reg_rtx[regno], new_dest);
1997
1998	      SUBST (SET_DEST (newpat), new_dest);
1999	      SUBST (XEXP (*cc_use, 0), new_dest);
2000	      SUBST (SET_SRC (newpat),
2001		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
2002	    }
2003	  else
2004	    undobuf.other_insn = 0;
2005	}
2006#endif
2007    }
2008  else
2009#endif
2010    {
2011      n_occurrences = 0;		/* `subst' counts here */
2012
2013      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
2014	 need to make a unique copy of I2SRC each time we substitute it
2015	 to avoid self-referential rtl.  */
2016
2017      subst_low_cuid = INSN_CUID (i2);
2018      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
2019		      ! i1_feeds_i3 && i1dest_in_i1src);
2020      substed_i2 = 1;
2021
2022      /* Record whether i2's body now appears within i3's body.  */
2023      i2_is_used = n_occurrences;
2024    }
2025
2026  /* If we already got a failure, don't try to do more.  Otherwise,
2027     try to substitute in I1 if we have it.  */
2028
2029  if (i1 && GET_CODE (newpat) != CLOBBER)
2030    {
2031      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
2033	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
2034
2035      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
2036			      0, (rtx*) 0))
2037	{
2038	  undo_all ();
2039	  return 0;
2040	}
2041
2042      n_occurrences = 0;
2043      subst_low_cuid = INSN_CUID (i1);
2044      newpat = subst (newpat, i1dest, i1src, 0, 0);
2045      substed_i1 = 1;
2046    }
2047
2048  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
2049     to count all the ways that I2SRC and I1SRC can be used.  */
2050  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
2051       && i2_is_used + added_sets_2 > 1)
2052      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
2053	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
2054	      > 1))
2055      /* Fail if we tried to make a new register (we used to abort, but there's
2056	 really no reason to).  */
2057      || max_reg_num () != maxreg
2058      /* Fail if we couldn't do something and have a CLOBBER.  */
2059      || GET_CODE (newpat) == CLOBBER
2060      /* Fail if this new pattern is a MULT and we didn't have one before
2061	 at the outer level.  */
2062      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
2063	  && ! have_mult))
2064    {
2065      undo_all ();
2066      return 0;
2067    }
2068
2069  /* If the actions of the earlier insns must be kept
2070     in addition to substituting them into the latest one,
2071     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */
2073
2074  if (added_sets_1 || added_sets_2)
2075    {
2076      combine_extras++;
2077
2078      if (GET_CODE (newpat) == PARALLEL)
2079	{
2080	  rtvec old = XVEC (newpat, 0);
2081	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
2082	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2083	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
2084		  sizeof (old->elem[0]) * old->num_elem);
2085	}
2086      else
2087	{
2088	  rtx old = newpat;
2089	  total_sets = 1 + added_sets_1 + added_sets_2;
2090	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
2091	  XVECEXP (newpat, 0, 0) = old;
2092	}
2093
2094      if (added_sets_1)
2095	XVECEXP (newpat, 0, --total_sets)
2096	  = (GET_CODE (PATTERN (i1)) == PARALLEL
2097	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
2098
2099      if (added_sets_2)
2100	{
2101	  /* If there is no I1, use I2's body as is.  We used to also not do
2102	     the subst call below if I2 was substituted into I3,
2103	     but that could lose a simplification.  */
2104	  if (i1 == 0)
2105	    XVECEXP (newpat, 0, --total_sets) = i2pat;
2106	  else
2107	    /* See comment where i2pat is assigned.  */
2108	    XVECEXP (newpat, 0, --total_sets)
2109	      = subst (i2pat, i1dest, i1src, 0, 0);
2110	}
2111    }
2112
2113  /* We come here when we are replacing a destination in I2 with the
2114     destination of I3.  */
2115 validate_replacement:
2116
2117  /* Note which hard regs this insn has as inputs.  */
2118  mark_used_regs_combine (newpat);
2119
2120  /* Is the result of combination a valid instruction?  */
2121  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2122
2123  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
2124     the second SET's destination is a register that is unused.  In that case,
2125     we just need the first SET.   This can occur when simplifying a divmod
2126     insn.  We *must* test for this case here because the code below that
2127     splits two independent SETs doesn't handle this case correctly when it
2128     updates the register status.  Also check the case where the first
2129     SET's destination is unused.  That would not cause incorrect code, but
2130     does cause an unneeded insn to remain.  */
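  /* E.g. (hypothetically) if simplification of a divmod insn left
	(parallel [(set (reg:SI 70) (udiv:SI (reg:SI 72) (reg:SI 73)))
		   (set (reg:SI 71) (umod:SI (reg:SI 72) (reg:SI 73)))])
     and (reg:SI 71) carries a REG_UNUSED note in I3, only the first SET
     is kept and recognition is retried on it alone.  */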
2131
2132  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2133      && XVECLEN (newpat, 0) == 2
2134      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2135      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2136      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
2137      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
2138      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
2139      && asm_noperands (newpat) < 0)
2140    {
2141      newpat = XVECEXP (newpat, 0, 0);
2142      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2143    }
2144
2145  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
2146	   && XVECLEN (newpat, 0) == 2
2147	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2148	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2149	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
2150	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
2151	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
2152	   && asm_noperands (newpat) < 0)
2153    {
2154      newpat = XVECEXP (newpat, 0, 1);
2155      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2156
2157      if (insn_code_number >= 0)
2158	{
2159	  /* If we will be able to accept this, we have made a change to the
2160	     destination of I3.  This requires us to do a few adjustments.  */
2161	  PATTERN (i3) = newpat;
2162	  adjust_for_new_dest (i3);
2163	}
2164    }
2165
2166  /* If we were combining three insns and the result is a simple SET
2167     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2168     insns.  There are two ways to do this.  It can be split using a
2169     machine-specific method (like when you have an addition of a large
2170     constant) or by combine in the function find_split_point.  */
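  /* Splitting (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 98765)))
     into an insn loading the large constant and an insn doing the add is
     the sort of machine-specific split meant here (a hypothetical
     example).  */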
2171
2172  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2173      && asm_noperands (newpat) < 0)
2174    {
2175      rtx m_split, *split;
2176      rtx ni2dest = i2dest;
2177
2178      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2179	 use I2DEST as a scratch register will help.  In the latter case,
2180	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
2181
2182      m_split = split_insns (newpat, i3);
2183
2184      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2185	 inputs of NEWPAT.  */
2186
2187      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2188	 possible to try that as a scratch reg.  This would require adding
2189	 more code to make it work though.  */
2190
2191      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
2192	{
2193	  /* If I2DEST is a hard register or the only use of a pseudo,
2194	     we can change its mode.  */
2195	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
2196	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
2197	      && GET_CODE (i2dest) == REG
2198	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2199		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2200		      && ! REG_USERVAR_P (i2dest))))
2201	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
2202				   REGNO (i2dest));
2203
2204	  m_split = split_insns (gen_rtx_PARALLEL
2205				 (VOIDmode,
2206				  gen_rtvec (2, newpat,
2207					     gen_rtx_CLOBBER (VOIDmode,
2208							      ni2dest))),
2209				 i3);
2210	  /* If the split with the mode-changed register didn't work, try
2211	     the original register.  */
2212	  if (! m_split && ni2dest != i2dest)
2213	    {
2214	      ni2dest = i2dest;
2215	      m_split = split_insns (gen_rtx_PARALLEL
2216				     (VOIDmode,
2217				      gen_rtvec (2, newpat,
2218						 gen_rtx_CLOBBER (VOIDmode,
2219								  i2dest))),
2220				     i3);
2221	    }
2222	}
2223
2224      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
2225	{
2226	  m_split = PATTERN (m_split);
2227	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
2228	  if (insn_code_number >= 0)
2229	    newpat = m_split;
2230	}
2231      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
2232	       && (next_real_insn (i2) == i3
2233		   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
2234	{
2235	  rtx i2set, i3set;
2236	  rtx newi3pat = PATTERN (NEXT_INSN (m_split));
2237	  newi2pat = PATTERN (m_split);
2238
2239	  i3set = single_set (NEXT_INSN (m_split));
2240	  i2set = single_set (m_split);
2241
2242	  /* In case we changed the mode of I2DEST, replace it in the
2243	     pseudo-register table here.  We can't do it above in case this
2244	     code doesn't get executed and we do a split the other way.  */
2245
2246	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2247	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
2248
2249	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2250
2251	  /* If I2 or I3 has multiple SETs, we won't know how to track
2252	     register status, so don't use these insns.  If I2's destination
2253	     is used between I2 and I3, we also can't use these insns.  */
2254
2255	  if (i2_code_number >= 0 && i2set && i3set
2256	      && (next_real_insn (i2) == i3
2257		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
2258	    insn_code_number = recog_for_combine (&newi3pat, i3,
2259						  &new_i3_notes);
2260	  if (insn_code_number >= 0)
2261	    newpat = newi3pat;
2262
2263	  /* It is possible that both insns now set the destination of I3.
2264	     If so, we must show an extra use of it.  */
2265
2266	  if (insn_code_number >= 0)
2267	    {
2268	      rtx new_i3_dest = SET_DEST (i3set);
2269	      rtx new_i2_dest = SET_DEST (i2set);
2270
2271	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
2272		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
2273		     || GET_CODE (new_i3_dest) == SUBREG)
2274		new_i3_dest = XEXP (new_i3_dest, 0);
2275
2276	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
2277		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
2278		     || GET_CODE (new_i2_dest) == SUBREG)
2279		new_i2_dest = XEXP (new_i2_dest, 0);
2280
2281	      if (GET_CODE (new_i3_dest) == REG
2282		  && GET_CODE (new_i2_dest) == REG
2283		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
2284		REG_N_SETS (REGNO (new_i2_dest))++;
2285	    }
2286	}
2287
2288      /* If we can split it and use I2DEST, go ahead and see if that
2289	 helps things be recognized.  Verify that none of the registers
2290	 are set between I2 and I3.  */
2291      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
2292#ifdef HAVE_cc0
2293	  && GET_CODE (i2dest) == REG
2294#endif
2295	  /* We need I2DEST in the proper mode.  If it is a hard register
2296	     or the only use of a pseudo, we can change its mode.  */
2297	  && (GET_MODE (*split) == GET_MODE (i2dest)
2298	      || GET_MODE (*split) == VOIDmode
2299	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
2300	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
2301		  && ! REG_USERVAR_P (i2dest)))
2302	  && (next_real_insn (i2) == i3
2303	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
2304	  /* We can't overwrite I2DEST if its value is still used by
2305	     NEWPAT.  */
2306	  && ! reg_referenced_p (i2dest, newpat))
2307	{
2308	  rtx newdest = i2dest;
2309	  enum rtx_code split_code = GET_CODE (*split);
2310	  enum machine_mode split_mode = GET_MODE (*split);
2311
2312	  /* Get NEWDEST as a register in the proper mode.  We have already
2313	     validated that we can do this.  */
2314	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2315	    {
2316	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2317
2318	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2319		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2320	    }
2321
2322	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2323	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2324	     appeared to be a memory address.  This is a kludge.  */
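	  /* E.g. (mult:SI (reg:SI 70) (const_int 4)) becomes
	     (ashift:SI (reg:SI 70) (const_int 2)).  */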
2325	  if (split_code == MULT
2326	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2327	      && INTVAL (XEXP (*split, 1)) > 0
2328	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2329	    {
2330	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
2331					     XEXP (*split, 0), GEN_INT (i)));
2332	      /* Update split_code because we may not have a multiply
2333		 anymore.  */
2334	      split_code = GET_CODE (*split);
2335	    }
2336
2337#ifdef INSN_SCHEDULING
2338	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2339	     be written as a ZERO_EXTEND.  */
2340	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2341	    {
2342#ifdef LOAD_EXTEND_OP
2343	      /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
2344		 what it really is.  */
2345	      if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
2346		  == SIGN_EXTEND)
2347		SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
2348						    SUBREG_REG (*split)));
2349	      else
2350#endif
2351		SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
2352						    SUBREG_REG (*split)));
2353	    }
2354#endif
2355
2356	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
2357	  SUBST (*split, newdest);
2358	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2359
2360	  /* If the split point was a MULT and we didn't have one before,
2361	     don't use one now.  */
2362	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2363	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2364	}
2365    }
2366
2367  /* Check for a case where we loaded from memory in a narrow mode and
2368     then sign extended it, but we need both registers.  In that case,
2369     we have a PARALLEL with both loads from the same memory location.
2370     We can split this into a load from memory followed by a register-register
2371     copy.  This saves at least one insn, more if register allocation can
2372     eliminate the copy.
2373
2374     We cannot do this if the destination of the first assignment is a
2375     condition code register or cc0.  We eliminate this case by making sure
2376     the SET_DEST and SET_SRC have the same mode.
2377
2378     We cannot do this if the destination of the second assignment is
2379     a register that we have already assumed is zero-extended.  Similarly
2380     for a SUBREG of such a register.  */
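  /* A hypothetical instance of the PARALLEL described above:
	(parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI (reg:SI 72))))
		   (set (reg:HI 71) (mem:HI (reg:SI 72)))])
     which we split into the extending load followed by a copy of the
     low part of (reg:SI 70) into (reg:HI 71).  */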
2381
2382  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2383	   && GET_CODE (newpat) == PARALLEL
2384	   && XVECLEN (newpat, 0) == 2
2385	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2386	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2387	   && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2388	       == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2389	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2390	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2391			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2392	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2393				   INSN_CUID (i2))
2394	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2395	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2396	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2397		 (GET_CODE (temp) == REG
2398		  && reg_nonzero_bits[REGNO (temp)] != 0
2399		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2400		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2401		  && (reg_nonzero_bits[REGNO (temp)]
2402		      != GET_MODE_MASK (word_mode))))
2403	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2404		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2405		     (GET_CODE (temp) == REG
2406		      && reg_nonzero_bits[REGNO (temp)] != 0
2407		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2408		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2409		      && (reg_nonzero_bits[REGNO (temp)]
2410			  != GET_MODE_MASK (word_mode)))))
2411	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2412					 SET_SRC (XVECEXP (newpat, 0, 1)))
2413	   && ! find_reg_note (i3, REG_UNUSED,
2414			       SET_DEST (XVECEXP (newpat, 0, 0))))
2415    {
2416      rtx ni2dest;
2417
2418      newi2pat = XVECEXP (newpat, 0, 0);
2419      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2420      newpat = XVECEXP (newpat, 0, 1);
2421      SUBST (SET_SRC (newpat),
2422	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2423      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2424
2425      if (i2_code_number >= 0)
2426	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2427
2428      if (insn_code_number >= 0)
2429	{
2430	  rtx insn;
2431	  rtx link;
2432
2433	  /* If we will be able to accept this, we have made a change to the
2434	     destination of I3.  This requires us to do a few adjustments.  */
2435	  PATTERN (i3) = newpat;
2436	  adjust_for_new_dest (i3);
2437
2438	  /* I3 now uses what used to be its destination and which is
2439	     now I2's destination.  That means we need a LOG_LINK from
2440	     I3 to I2.  But we used to have one, so we still will.
2441
2442	     However, some later insn might be using I2's dest and have
2443	     a LOG_LINK pointing at I3.  We must remove this link.
2444	     The simplest way to remove the link is to point it at I1,
2445	     which we know will be a NOTE.  */
2446
2447	  for (insn = NEXT_INSN (i3);
2448	       insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2449			|| insn != this_basic_block->next_bb->head);
2450	       insn = NEXT_INSN (insn))
2451	    {
2452	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2453		{
2454		  for (link = LOG_LINKS (insn); link;
2455		       link = XEXP (link, 1))
2456		    if (XEXP (link, 0) == i3)
2457		      XEXP (link, 0) = i1;
2458
2459		  break;
2460		}
2461	    }
2462	}
2463    }
2464
2465  /* Similarly, check for a case where we have a PARALLEL of two independent
2466     SETs but we started with three insns.  In this case, we can do the sets
2467     as two separate insns.  This case occurs when some SET allows two
2468     other insns to combine, but the destination of that SET is still live.  */
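  /* In that event the PARALLEL's second SET is moved up to become the
     new I2 (provided its source does not cross a set of its inputs
     between I2 and I3) and the first SET stays in I3, except that a SET
     referencing cc0 must come first.  */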
2469
2470  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2471	   && GET_CODE (newpat) == PARALLEL
2472	   && XVECLEN (newpat, 0) == 2
2473	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2474	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2475	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2476	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2477	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2478	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2479	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2480				   INSN_CUID (i2))
2481	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2482	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2483	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2484	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2485				  XVECEXP (newpat, 0, 0))
2486	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2487				  XVECEXP (newpat, 0, 1))
2488	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2489		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2490    {
2491      /* Normally, it doesn't matter which of the two is done first,
2492	 but it does if one references cc0.  In that case, it has to
2493	 be first.  */
2494#ifdef HAVE_cc0
2495      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2496	{
2497	  newi2pat = XVECEXP (newpat, 0, 0);
2498	  newpat = XVECEXP (newpat, 0, 1);
2499	}
2500      else
2501#endif
2502	{
2503	  newi2pat = XVECEXP (newpat, 0, 1);
2504	  newpat = XVECEXP (newpat, 0, 0);
2505	}
2506
2507      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2508
2509      if (i2_code_number >= 0)
2510	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2511    }
2512
2513  /* If it still isn't recognized, fail and change things back the way they
2514     were.  */
2515  if ((insn_code_number < 0
2516       /* Is the result a reasonable ASM_OPERANDS?  */
2517       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2518    {
2519      undo_all ();
2520      return 0;
2521    }
2522
2523  /* If we had to change another insn, make sure it is valid also.  */
2524  if (undobuf.other_insn)
2525    {
2526      rtx other_pat = PATTERN (undobuf.other_insn);
2527      rtx new_other_notes;
2528      rtx note, next;
2529
2530      CLEAR_HARD_REG_SET (newpat_used_regs);
2531
2532      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2533					     &new_other_notes);
2534
2535      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2536	{
2537	  undo_all ();
2538	  return 0;
2539	}
2540
2541      PATTERN (undobuf.other_insn) = other_pat;
2542
2543      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2544	 are still valid.  Then add any non-duplicate notes added by
2545	 recog_for_combine.  */
2546      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2547	{
2548	  next = XEXP (note, 1);
2549
2550	  if (REG_NOTE_KIND (note) == REG_UNUSED
2551	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2552	    {
2553	      if (GET_CODE (XEXP (note, 0)) == REG)
2554		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2555
2556	      remove_note (undobuf.other_insn, note);
2557	    }
2558	}
2559
2560      for (note = new_other_notes; note; note = XEXP (note, 1))
2561	if (GET_CODE (XEXP (note, 0)) == REG)
2562	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2563
2564      distribute_notes (new_other_notes, undobuf.other_insn,
2565			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2566    }
2567#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user, then check whether
     they are adjacent to each other.  */
2570  {
2571    rtx p = prev_nonnote_insn (i3);
2572    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2573	&& sets_cc0_p (newi2pat))
2574      {
2575	undo_all ();
2576	return 0;
2577      }
2578  }
2579#endif
2580
2581  /* We now know that we can do this combination.  Merge the insns and
2582     update the status of registers and LOG_LINKS.  */
2583
2584  {
2585    rtx i3notes, i2notes, i1notes = 0;
2586    rtx i3links, i2links, i1links = 0;
2587    rtx midnotes = 0;
2588    unsigned int regno;
2589    /* Compute which registers we expect to eliminate.  newi2pat may be setting
2590       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
2591       same as i3dest, in which case newi2pat may be setting i1dest.  */
2592    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2593		   || i2dest_in_i2src || i2dest_in_i1src
2594		   ? 0 : i2dest);
2595    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2596		   || (newi2pat && reg_set_p (i1dest, newi2pat))
2597		   ? 0 : i1dest);
2598
2599    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2600       clear them.  */
2601    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2602    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2603    if (i1)
2604      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2605
2606    /* Ensure that we do not have something that should not be shared but
2607       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */
2609
2610    reset_used_flags (i3notes);
2611    reset_used_flags (i2notes);
2612    reset_used_flags (i1notes);
2613    reset_used_flags (newpat);
2614    reset_used_flags (newi2pat);
2615    if (undobuf.other_insn)
2616      reset_used_flags (PATTERN (undobuf.other_insn));
2617
2618    i3notes = copy_rtx_if_shared (i3notes);
2619    i2notes = copy_rtx_if_shared (i2notes);
2620    i1notes = copy_rtx_if_shared (i1notes);
2621    newpat = copy_rtx_if_shared (newpat);
2622    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      PATTERN (undobuf.other_insn)
	= copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2625
2626    INSN_CODE (i3) = insn_code_number;
2627    PATTERN (i3) = newpat;
2628
2629    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2630      {
2631	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2632
2633	reset_used_flags (call_usage);
2634	call_usage = copy_rtx (call_usage);
2635
2636	if (substed_i2)
2637	  replace_rtx (call_usage, i2dest, i2src);
2638
2639	if (substed_i1)
2640	  replace_rtx (call_usage, i1dest, i1src);
2641
2642	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2643      }
2644
2645    if (undobuf.other_insn)
2646      INSN_CODE (undobuf.other_insn) = other_code_number;
2647
2648    /* We had one special case above where I2 had more than one set and
2649       we replaced a destination of one of those sets with the destination
2650       of I3.  In that case, we have to update LOG_LINKS of insns later
2651       in this basic block.  Note that this (expensive) case is rare.
2652
2653       Also, in this case, we must pretend that all REG_NOTEs for I2
2654       actually came from I3, so that REG_UNUSED notes from I2 will be
2655       properly handled.  */
2656
2657    if (i3_subst_into_i2)
2658      {
2659	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2660	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2661	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2662	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2663	      && ! find_reg_note (i2, REG_UNUSED,
2664				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2665	    for (temp = NEXT_INSN (i2);
2666		 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
2667			  || this_basic_block->head != temp);
2668		 temp = NEXT_INSN (temp))
2669	      if (temp != i3 && INSN_P (temp))
2670		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2671		  if (XEXP (link, 0) == i2)
2672		    XEXP (link, 0) = i3;
2673
2674	if (i3notes)
2675	  {
2676	    rtx link = i3notes;
2677	    while (XEXP (link, 1))
2678	      link = XEXP (link, 1);
2679	    XEXP (link, 1) = i2notes;
2680	  }
2681	else
2682	  i3notes = i2notes;
2683	i2notes = 0;
2684      }
2685
2686    LOG_LINKS (i3) = 0;
2687    REG_NOTES (i3) = 0;
2688    LOG_LINKS (i2) = 0;
2689    REG_NOTES (i2) = 0;
2690
2691    if (newi2pat)
2692      {
2693	INSN_CODE (i2) = i2_code_number;
2694	PATTERN (i2) = newi2pat;
2695      }
2696    else
2697      {
2698	PUT_CODE (i2, NOTE);
2699	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2700	NOTE_SOURCE_FILE (i2) = 0;
2701      }
2702
2703    if (i1)
2704      {
2705	LOG_LINKS (i1) = 0;
2706	REG_NOTES (i1) = 0;
2707	PUT_CODE (i1, NOTE);
2708	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2709	NOTE_SOURCE_FILE (i1) = 0;
2710      }
2711
2712    /* Get death notes for everything that is now used in either I3 or
2713       I2 and used to die in a previous insn.  If we built two new
2714       patterns, move from I1 to I2 then I2 to I3 so that we get the
2715       proper movement on registers that I2 modifies.  */
2716
2717    if (newi2pat)
2718      {
2719	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2720	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2721      }
2722    else
2723      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2724		   i3, &midnotes);
2725
2726    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2727    if (i3notes)
2728      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2729			elim_i2, elim_i1);
2730    if (i2notes)
2731      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2732			elim_i2, elim_i1);
2733    if (i1notes)
2734      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2735			elim_i2, elim_i1);
2736    if (midnotes)
2737      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2738			elim_i2, elim_i1);
2739
2740    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2741       know these are REG_UNUSED and want them to go to the desired insn,
2742       so we always pass it as i3.  We have not counted the notes in
2743       reg_n_deaths yet, so we need to do so now.  */
2744
2745    if (newi2pat && new_i2_notes)
2746      {
2747	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2748	  if (GET_CODE (XEXP (temp, 0)) == REG)
2749	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2750
2751	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2752      }
2753
2754    if (new_i3_notes)
2755      {
2756	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2757	  if (GET_CODE (XEXP (temp, 0)) == REG)
2758	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2759
2760	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2761      }
2762
2763    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2764       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2765       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2766       in that case, it might delete I2.  Similarly for I2 and I1.
2767       Show an additional death due to the REG_DEAD note we make here.  If
2768       we discard it in distribute_notes, we will decrement it again.  */
2769
2770    if (i3dest_killed)
2771      {
2772	if (GET_CODE (i3dest_killed) == REG)
2773	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2774
2775	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2776	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2777					       NULL_RTX),
2778			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2779	else
2780	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2781					       NULL_RTX),
2782			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2783			    elim_i2, elim_i1);
2784      }
2785
2786    if (i2dest_in_i2src)
2787      {
2788	if (GET_CODE (i2dest) == REG)
2789	  REG_N_DEATHS (REGNO (i2dest))++;
2790
2791	if (newi2pat && reg_set_p (i2dest, newi2pat))
2792	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2793			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2794	else
2795	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2796			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2797			    NULL_RTX, NULL_RTX);
2798      }
2799
2800    if (i1dest_in_i1src)
2801      {
2802	if (GET_CODE (i1dest) == REG)
2803	  REG_N_DEATHS (REGNO (i1dest))++;
2804
2805	if (newi2pat && reg_set_p (i1dest, newi2pat))
2806	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2807			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2808	else
2809	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2810			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2811			    NULL_RTX, NULL_RTX);
2812      }
2813
2814    distribute_links (i3links);
2815    distribute_links (i2links);
2816    distribute_links (i1links);
2817
2818    if (GET_CODE (i2dest) == REG)
2819      {
2820	rtx link;
2821	rtx i2_insn = 0, i2_val = 0, set;
2822
2823	/* The insn that used to set this register doesn't exist, and
2824	   this life of the register may not exist either.  See if one of
2825	   I3's links points to an insn that sets I2DEST.  If it does,
	   that is now the last known value for I2DEST.  If we don't update
	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
	   will be set correctly in combine_instructions.  */
2830
2831	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2832	  if ((set = single_set (XEXP (link, 0))) != 0
2833	      && rtx_equal_p (i2dest, SET_DEST (set)))
2834	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2835
2836	record_value_for_reg (i2dest, i2_insn, i2_val);
2837
	/* If the reg formerly set in I2 died only once and that was in I3,
	   decrement its set count so it won't make `reload' do any work.  */
2840	if (! added_sets_2
2841	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2842	    && ! i2dest_in_i2src)
2843	  {
2844	    regno = REGNO (i2dest);
2845	    REG_N_SETS (regno)--;
2846	  }
2847      }
2848
2849    if (i1 && GET_CODE (i1dest) == REG)
2850      {
2851	rtx link;
2852	rtx i1_insn = 0, i1_val = 0, set;
2853
2854	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2855	  if ((set = single_set (XEXP (link, 0))) != 0
2856	      && rtx_equal_p (i1dest, SET_DEST (set)))
2857	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2858
2859	record_value_for_reg (i1dest, i1_insn, i1_val);
2860
2861	regno = REGNO (i1dest);
2862	if (! added_sets_1 && ! i1dest_in_i1src)
2863	  REG_N_SETS (regno)--;
2864      }
2865
2866    /* Update reg_nonzero_bits et al for any changes that may have been made
2867	       to this insn.  The order of the set_nonzero_bits_and_sign_copies
2868	       calls matters, because newi2pat can affect the nonzero_bits of newpat.  */
2869    if (newi2pat)
2870      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2871    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2872
2873    /* Set new_direct_jump_p if a new return or simple jump instruction
2874       has been created.
2875
2876       If I3 is now an unconditional jump, ensure that it has a
2877       BARRIER following it since it may have initially been a
2878       conditional jump.  It may also be the last nonnote insn.  */
2879
2880    if (returnjump_p (i3) || any_uncondjump_p (i3))
2881      {
2882	*new_direct_jump_p = 1;
2883
2884	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2885	    || GET_CODE (temp) != BARRIER)
2886	  emit_barrier_after (i3);
2887      }
2888
2889    if (undobuf.other_insn != NULL_RTX
2890	&& (returnjump_p (undobuf.other_insn)
2891	    || any_uncondjump_p (undobuf.other_insn)))
2892      {
2893	*new_direct_jump_p = 1;
2894
2895	if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
2896	    || GET_CODE (temp) != BARRIER)
2897	  emit_barrier_after (undobuf.other_insn);
2898      }
2899
2900	    /* A NOOP jump does not need a barrier, but it does need the CFG
2901	       cleaned up.  */
2902    if (GET_CODE (newpat) == SET
2903	&& SET_SRC (newpat) == pc_rtx
2904	&& SET_DEST (newpat) == pc_rtx)
2905      *new_direct_jump_p = 1;
2906  }
2907
2908  combine_successes++;
2909  undo_commit ();
2910
2911  /* Clear this here, so that subsequent get_last_value calls are not
2912     affected.  */
2913  subst_prev_insn = NULL_RTX;
2914
2915  if (added_links_insn
2916      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2917      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2918    return added_links_insn;
2919  else
2920    return newi2pat ? i2 : i3;
2921}
2922
2923/* Undo all the modifications recorded in undobuf.  */
2924
2925static void
2926undo_all ()
2927{
2928  struct undo *undo, *next;
2929
2930  for (undo = undobuf.undos; undo; undo = next)
2931    {
2932      next = undo->next;
2933      if (undo->is_int)
2934	*undo->where.i = undo->old_contents.i;
2935      else
2936	*undo->where.r = undo->old_contents.r;
2937
2938      undo->next = undobuf.frees;
2939      undobuf.frees = undo;
2940    }
2941
2942  undobuf.undos = 0;
2943
2944  /* Clear this here, so that subsequent get_last_value calls are not
2945     affected.  */
2946  subst_prev_insn = NULL_RTX;
2947}
2948
2949/* We've committed to accepting the changes we made.  Move all
2950   of the undos to the free list.  */
2951
2952static void
2953undo_commit ()
2954{
2955  struct undo *undo, *next;
2956
2957  for (undo = undobuf.undos; undo; undo = next)
2958    {
2959      next = undo->next;
2960      undo->next = undobuf.frees;
2961      undobuf.frees = undo;
2962    }
2963  undobuf.undos = 0;
2964}
2965
2966
2967/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2968   where we have an arithmetic expression and return that point.  LOC will
2969   be inside INSN.
2970
2971   try_combine will call this function to see if an insn can be split into
2972   two insns.  */
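	
	/* As one illustrative example (not exhaustive): for a SET whose source
	   is a binary operation with a constant operand that the machine cannot
	   accept directly, the address of that constant operand is returned as
	   the split point, so the constant can be computed in a separate insn.  */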
2973
2974static rtx *
2975find_split_point (loc, insn)
2976     rtx *loc;
2977     rtx insn;
2978{
2979  rtx x = *loc;
2980  enum rtx_code code = GET_CODE (x);
2981  rtx *split;
2982  unsigned HOST_WIDE_INT len = 0;
2983  HOST_WIDE_INT pos = 0;
2984  int unsignedp = 0;
2985  rtx inner = NULL_RTX;
2986
2987  /* First special-case some codes.  */
2988  switch (code)
2989    {
2990    case SUBREG:
2991#ifdef INSN_SCHEDULING
2992      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2993	 point.  */
2994      if (GET_CODE (SUBREG_REG (x)) == MEM)
2995	return loc;
2996#endif
2997      return find_split_point (&SUBREG_REG (x), insn);
2998
2999    case MEM:
3000#ifdef HAVE_lo_sum
3001      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
3002	 using LO_SUM and HIGH.  */
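	      /* E.g. (mem (symbol_ref X)) becomes
		 (mem (lo_sum (high (symbol_ref X)) (symbol_ref X))), and the
		 HIGH subexpression is returned as the split point.  */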
3003      if (GET_CODE (XEXP (x, 0)) == CONST
3004	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
3005	{
3006	  SUBST (XEXP (x, 0),
3007		 gen_rtx_LO_SUM (Pmode,
3008				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
3009				 XEXP (x, 0)));
3010	  return &XEXP (XEXP (x, 0), 0);
3011	}
3012#endif
3013
3014      /* If we have a PLUS whose second operand is a constant and the
3015	 address is not valid, perhaps we can split it up using
3016	 the machine-specific way to split large constants.  We use
3017	 the first pseudo-reg (one of the virtual regs) as a placeholder;
3018	 it will not remain in the result.  */
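	      /* (Illustrative; the details are target-specific.)  On a machine
		 whose addresses allow only small offsets, (plus (reg) (const_int
		 0x12345)) may split into one insn forming most of the constant
		 and an address adding the remaining low-order bits.  */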
3019      if (GET_CODE (XEXP (x, 0)) == PLUS
3020	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3021	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
3022	{
3023	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
3024	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
3025				 subst_insn);
3026
3027	  /* This should have produced two insns, each of which sets our
3028	     placeholder.  If the source of the second is a valid address,
3029	     we can put both sources together and make a split point
3030	     in the middle.  */
3031
3032	  if (seq
3033	      && NEXT_INSN (seq) != NULL_RTX
3034	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
3035	      && GET_CODE (seq) == INSN
3036	      && GET_CODE (PATTERN (seq)) == SET
3037	      && SET_DEST (PATTERN (seq)) == reg
3038	      && ! reg_mentioned_p (reg,
3039				    SET_SRC (PATTERN (seq)))
3040	      && GET_CODE (NEXT_INSN (seq)) == INSN
3041	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
3042	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
3043	      && memory_address_p (GET_MODE (x),
3044				   SET_SRC (PATTERN (NEXT_INSN (seq)))))
3045	    {
3046	      rtx src1 = SET_SRC (PATTERN (seq));
3047	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
3048
3049	      /* Replace the placeholder in SRC2 with SRC1.  If we can
3050		 find where in SRC2 it was placed, that can become our
3051		 split point and we can replace this address with SRC2.
3052		 Just try two obvious places.  */
3053
3054	      src2 = replace_rtx (src2, reg, src1);
3055	      split = 0;
3056	      if (XEXP (src2, 0) == src1)
3057		split = &XEXP (src2, 0);
3058	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
3059		       && XEXP (XEXP (src2, 0), 0) == src1)
3060		split = &XEXP (XEXP (src2, 0), 0);
3061
3062	      if (split)
3063		{
3064		  SUBST (XEXP (x, 0), src2);
3065		  return split;
3066		}
3067	    }
3068
3069	  /* If that didn't work, perhaps the first operand is complex and
3070	     needs to be computed separately, so make a split point there.
3071	     This will occur on machines that just support REG + CONST
3072	     and have a constant moved through some previous computation.  */
3073
3074	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
3075		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3076			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
3077			     == 'o')))
3078	    return &XEXP (XEXP (x, 0), 0);
3079	}
3080      break;
3081
3082    case SET:
3083#ifdef HAVE_cc0
3084      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3085	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3086	 we need to put the operand into a register.  So split at that
3087	 point.  */
3088
3089      if (SET_DEST (x) == cc0_rtx
3090	  && GET_CODE (SET_SRC (x)) != COMPARE
3091	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3092	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
3093	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
3094		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
3095	return &SET_SRC (x);
3096#endif
3097
3098      /* See if we can split SET_SRC as it stands.  */
3099      split = find_split_point (&SET_SRC (x), insn);
3100      if (split && split != &SET_SRC (x))
3101	return split;
3102
3103      /* See if we can split SET_DEST as it stands.  */
3104      split = find_split_point (&SET_DEST (x), insn);
3105      if (split && split != &SET_DEST (x))
3106	return split;
3107
3108      /* See if this is a bitfield assignment with everything constant.  If
3109	 so, this is an IOR of an AND, so split it into that.  */
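	      /* Concretely, (set (zero_extract X LEN POS) (const_int SRC))
		 becomes (set X (ior (and X (~ (MASK << POS))) (SRC << POS))),
		 where MASK is (1 << LEN) - 1; the AND is omitted when
		 SRC == MASK, since no bits need clearing then.  */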
3110      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3111	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3112	      <= HOST_BITS_PER_WIDE_INT)
3113	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3114	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3115	  && GET_CODE (SET_SRC (x)) == CONST_INT
3116	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3117	       + INTVAL (XEXP (SET_DEST (x), 2)))
3118	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3119	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3120	{
3121	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3122	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3123	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3124	  rtx dest = XEXP (SET_DEST (x), 0);
3125	  enum machine_mode mode = GET_MODE (dest);
3126	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3127
3128	  if (BITS_BIG_ENDIAN)
3129	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3130
3131	  if (src == mask)
3132	    SUBST (SET_SRC (x),
3133		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3134	  else
3135	    SUBST (SET_SRC (x),
3136		   gen_binary (IOR, mode,
3137			       gen_binary (AND, mode, dest,
3138					   gen_int_mode (~(mask << pos),
3139							 mode)),
3140			       GEN_INT (src << pos)));
3141
3142	  SUBST (SET_DEST (x), dest);
3143
3144	  split = find_split_point (&SET_SRC (x), insn);
3145	  if (split && split != &SET_SRC (x))
3146	    return split;
3147	}
3148
3149      /* Otherwise, see if this is an operation that we can split into two.
3150	 If so, try to split that.  */
3151      code = GET_CODE (SET_SRC (x));
3152
3153      switch (code)
3154	{
3155	case AND:
3156	  /* If we are AND'ing with a large constant that is only a single
3157	     bit and the result is only being used in a context where we
3158	     need to know if it is zero or nonzero, replace it with a bit
3159	     extraction.  This will avoid the large constant, which might
3160	     have taken more than one insn to make.  If the constant were
3161	     not a valid argument to the AND but took only one insn to make,
3162	     this is no worse, but if it took more than one insn, it will
3163	     be better.  */
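		  /* For example, (and X (const_int 0x100000)) whose result is
		     compared only against zero can become
		     (zero_extract X 1 20), avoiding the 21-bit constant.  */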
3164
3165	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3166	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3167	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3168	      && GET_CODE (SET_DEST (x)) == REG
3169	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3170	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3171	      && XEXP (*split, 0) == SET_DEST (x)
3172	      && XEXP (*split, 1) == const0_rtx)
3173	    {
3174	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3175						XEXP (SET_SRC (x), 0),
3176						pos, NULL_RTX, 1, 1, 0, 0);
3177	      if (extraction != 0)
3178		{
3179		  SUBST (SET_SRC (x), extraction);
3180		  return find_split_point (loc, insn);
3181		}
3182	    }
3183	  break;
3184
3185	case NE:
3186	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3187	     is known to be on, this can be converted into a NEG of a shift.  */
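		  /* E.g. if only bit 3 of X can be nonzero, (ne X 0) becomes
		     (neg (lshiftrt X 3)): the shift leaves 0 or 1, and negating
		     gives 0 or -1, the latter being STORE_FLAG_VALUE here.  */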
3188	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3189	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3190	      && 1 <= (pos = exact_log2
3191		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3192				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3193	    {
3194	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3195
3196	      SUBST (SET_SRC (x),
3197		     gen_rtx_NEG (mode,
3198				  gen_rtx_LSHIFTRT (mode,
3199						    XEXP (SET_SRC (x), 0),
3200						    GEN_INT (pos))));
3201
3202	      split = find_split_point (&SET_SRC (x), insn);
3203	      if (split && split != &SET_SRC (x))
3204		return split;
3205	    }
3206	  break;
3207
3208	case SIGN_EXTEND:
3209	  inner = XEXP (SET_SRC (x), 0);
3210
3211	  /* We can't optimize if either mode is a partial integer
3212	     mode as we don't know how many bits are significant
3213	     in those modes.  */
3214	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3215	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3216	    break;
3217
3218	  pos = 0;
3219	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3220	  unsignedp = 0;
3221	  break;
3222
3223	case SIGN_EXTRACT:
3224	case ZERO_EXTRACT:
3225	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3226	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3227	    {
3228	      inner = XEXP (SET_SRC (x), 0);
3229	      len = INTVAL (XEXP (SET_SRC (x), 1));
3230	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3231
3232	      if (BITS_BIG_ENDIAN)
3233		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3234	      unsignedp = (code == ZERO_EXTRACT);
3235	    }
3236	  break;
3237
3238	default:
3239	  break;
3240	}
3241
3242      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3243	{
3244	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3245
3246	  /* For unsigned, we have a choice of a shift followed by an
3247	     AND or two shifts.  Use two shifts for field sizes where the
3248	     constant might be too large.  We assume here that we can
3249	     always at least get 8-bit constants in an AND insn, which is
3250	     true for every current RISC.  */
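		  /* E.g. an unsigned 8-bit field at bit POS becomes
		     (and (lshiftrt X POS) 255); wider unsigned fields use two
		     shifts instead, since a mask such as 0xffffff might not
		     fit in a single AND insn.  */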
3251
3252	  if (unsignedp && len <= 8)
3253	    {
3254	      SUBST (SET_SRC (x),
3255		     gen_rtx_AND (mode,
3256				  gen_rtx_LSHIFTRT
3257				  (mode, gen_lowpart_for_combine (mode, inner),
3258				   GEN_INT (pos)),
3259				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3260
3261	      split = find_split_point (&SET_SRC (x), insn);
3262	      if (split && split != &SET_SRC (x))
3263		return split;
3264	    }
3265	  else
3266	    {
3267	      SUBST (SET_SRC (x),
3268		     gen_rtx_fmt_ee
3269		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3270		      gen_rtx_ASHIFT (mode,
3271				      gen_lowpart_for_combine (mode, inner),
3272				      GEN_INT (GET_MODE_BITSIZE (mode)
3273					       - len - pos)),
3274		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3275
3276	      split = find_split_point (&SET_SRC (x), insn);
3277	      if (split && split != &SET_SRC (x))
3278		return split;
3279	    }
3280	}
3281
3282      /* See if this is a simple operation with a constant as the second
3283	 operand.  It might be that this constant is out of range and hence
3284	 could be used as a split point.  */
3285      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3286	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3287	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3288	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3289	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3290	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3291		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3292		      == 'o'))))
3293	return &XEXP (SET_SRC (x), 1);
3294
3295      /* Finally, see if this is a simple operation with its first operand
3296	 not in a register.  The operation might require this operand in a
3297	 register, so return it as a split point.  We can always do this
3298	 because if the first operand were another operation, we would have
3299	 already found it as a split point.  */
3300      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3301	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3302	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3303	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3304	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3305	return &XEXP (SET_SRC (x), 0);
3306
3307      return 0;
3308
3309    case AND:
3310    case IOR:
3311      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3312	 it is better to write this as (not (ior A B)) so we can split it.
3313	 Similarly for IOR.  */
3314      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3315	{
3316	  SUBST (*loc,
3317		 gen_rtx_NOT (GET_MODE (x),
3318			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3319					      GET_MODE (x),
3320					      XEXP (XEXP (x, 0), 0),
3321					      XEXP (XEXP (x, 1), 0))));
3322	  return find_split_point (loc, insn);
3323	}
3324
3325      /* Many RISC machines have a large set of logical insns.  If the
3326	 second operand is a NOT, put it first so we will try to split the
3327	 other operand first.  */
3328      if (GET_CODE (XEXP (x, 1)) == NOT)
3329	{
3330	  rtx tem = XEXP (x, 0);
3331	  SUBST (XEXP (x, 0), XEXP (x, 1));
3332	  SUBST (XEXP (x, 1), tem);
3333	}
3334      break;
3335
3336    default:
3337      break;
3338    }
3339
3340  /* Otherwise, select our actions depending on our rtx class.  */
3341  switch (GET_RTX_CLASS (code))
3342    {
3343    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3344    case '3':
3345      split = find_split_point (&XEXP (x, 2), insn);
3346      if (split)
3347	return split;
3348      /* ... fall through ...  */
3349    case '2':
3350    case 'c':
3351    case '<':
3352      split = find_split_point (&XEXP (x, 1), insn);
3353      if (split)
3354	return split;
3355      /* ... fall through ...  */
3356    case '1':
3357      /* Some machines have (and (shift ...) ...) insns.  If X is not
3358	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3359      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3360	return &XEXP (x, 0);
3361
3362      split = find_split_point (&XEXP (x, 0), insn);
3363      if (split)
3364	return split;
3365      return loc;
3366    }
3367
3368  /* Otherwise, we don't have a split point.  */
3369  return 0;
3370}
3371
3372/* Throughout X, replace FROM with TO, and return the result.
3373   The result is TO if X is FROM;
3374   otherwise the result is X, but its contents may have been modified.
3375   If they were modified, a record was made in undobuf so that
3376   undo_all will (among other things) return X to its original state.
3377
3378   If the number of changes necessary is too great to record and undo,
3379   the excess changes are not made, so the result is invalid.
3380   The changes already made can still be undone.
3381   undobuf.num_undo is incremented for such changes, so by testing that,
3382   the caller can tell whether the result is valid.
3383
3384   `n_occurrences' is incremented each time FROM is replaced.
3385
3386   IN_DEST is nonzero if we are processing the SET_DEST of a SET.
3387
3388   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
3389   by copying if `n_occurrences' is nonzero.  */
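	
	/* Note (informal): on failure, subst and its subroutines return the rtx
	   (clobber (const_int 0)); callers recognize exactly that value as
	   meaning "this combination is invalid" (see the CLOBBER checks below).  */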
3390
3391static rtx
3392subst (x, from, to, in_dest, unique_copy)
3393     rtx x, from, to;
3394     int in_dest;
3395     int unique_copy;
3396{
3397  enum rtx_code code = GET_CODE (x);
3398  enum machine_mode op0_mode = VOIDmode;
3399  const char *fmt;
3400  int len, i;
3401  rtx new;
3402
3403/* Two expressions are equal if they are identical copies of a shared
3404   RTX or if they are both registers with the same register number
3405   and mode.  */
3406
3407#define COMBINE_RTX_EQUAL_P(X,Y)			\
3408  ((X) == (Y)						\
3409   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3410       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3411
3412  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3413    {
3414      n_occurrences++;
3415      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3416    }
3417
3418  /* If X and FROM are the same register but different modes, they will
3419     not have been seen as equal above.  However, flow.c will make a
3420     LOG_LINKS entry for that case.  If we do nothing, we will try to
3421     rerecognize our original insn and, when it succeeds, we will
3422     delete the feeding insn, which is incorrect.
3423
3424     So force this insn not to match in this (rare) case.  */
3425  if (! in_dest && code == REG && GET_CODE (from) == REG
3426      && REGNO (x) == REGNO (from))
3427    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3428
3429  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3430     of which may contain things that can be combined.  */
3431  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3432    return x;
3433
3434  /* It is possible to have a subexpression appear twice in the insn.
3435     Suppose that FROM is a register that appears within TO.
3436     Then, after that subexpression has been scanned once by `subst',
3437     the second time it is scanned, TO may be found.  If we were
3438     to scan TO here, we would find FROM within it and create a
3439     self-referent rtl structure which is completely wrong.  */
3440     self-referential rtl structure, which is completely wrong.  */
3441    return to;
3442
3443  /* Parallel asm_operands need special attention because all of the
3444     inputs are shared across the arms.  Furthermore, unsharing the
3445     rtl results in recognition failures.  Failure to handle this case
3446     specially can result in circular rtl.
3447
3448     Solve this by doing a normal pass across the first entry of the
3449     parallel, and only processing the SET_DESTs of the subsequent
3450     entries.  Ug.  */
3451
3452  if (code == PARALLEL
3453      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3454      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3455    {
3456      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3457
3458      /* If this substitution failed, this whole thing fails.  */
3459      if (GET_CODE (new) == CLOBBER
3460	  && XEXP (new, 0) == const0_rtx)
3461	return new;
3462
3463      SUBST (XVECEXP (x, 0, 0), new);
3464
3465      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3466	{
3467	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3468
3469	  if (GET_CODE (dest) != REG
3470	      && GET_CODE (dest) != CC0
3471	      && GET_CODE (dest) != PC)
3472	    {
3473	      new = subst (dest, from, to, 0, unique_copy);
3474
3475	      /* If this substitution failed, this whole thing fails.  */
3476	      if (GET_CODE (new) == CLOBBER
3477		  && XEXP (new, 0) == const0_rtx)
3478		return new;
3479
3480	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3481	    }
3482	}
3483    }
3484  else
3485    {
3486      len = GET_RTX_LENGTH (code);
3487      fmt = GET_RTX_FORMAT (code);
3488
3489      /* We don't need to process a SET_DEST that is a register, CC0,
3490	 or PC, so set up to skip this common case.  All other cases
3491	 where we want to suppress replacing something inside a
3492	 SET_SRC are handled via the IN_DEST operand.  */
3493      if (code == SET
3494	  && (GET_CODE (SET_DEST (x)) == REG
3495	      || GET_CODE (SET_DEST (x)) == CC0
3496	      || GET_CODE (SET_DEST (x)) == PC))
3497	fmt = "ie";
3498
3499      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3500	 constant.  */
3501      if (fmt[0] == 'e')
3502	op0_mode = GET_MODE (XEXP (x, 0));
3503
3504      for (i = 0; i < len; i++)
3505	{
3506	  if (fmt[i] == 'E')
3507	    {
3508	      int j;
3509	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3510		{
3511		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3512		    {
3513		      new = (unique_copy && n_occurrences
3514			     ? copy_rtx (to) : to);
3515		      n_occurrences++;
3516		    }
3517		  else
3518		    {
3519		      new = subst (XVECEXP (x, i, j), from, to, 0,
3520				   unique_copy);
3521
3522		      /* If this substitution failed, this whole thing
3523			 fails.  */
3524		      if (GET_CODE (new) == CLOBBER
3525			  && XEXP (new, 0) == const0_rtx)
3526			return new;
3527		    }
3528
3529		  SUBST (XVECEXP (x, i, j), new);
3530		}
3531	    }
3532	  else if (fmt[i] == 'e')
3533	    {
3534	      /* If this is a register being set, ignore it.  */
3535	      new = XEXP (x, i);
3536	      if (in_dest
3537		  && (code == SUBREG || code == STRICT_LOW_PART
3538		      || code == ZERO_EXTRACT)
3539		  && i == 0
3540		  && GET_CODE (new) == REG)
3541		;
3542
3543	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3544		{
3545		  /* In general, don't install a subreg involving two
3546		     modes that are not tieable.  It can worsen register
3547		     allocation, and can even create invalid reload
3548		     insns, since the reg inside may need to be copied
3549		     from in the outside mode, and that may be invalid
3550		     if it is an fp reg copied in integer mode.
3551
3552		     We allow two exceptions to this: it is valid if
3553		     the substitution is inside another SUBREG and the
3554		     mode of that SUBREG and the mode of the inside of
3555		     TO are tieable, and it is valid if X is a SET that
3556		     copies FROM to CC0.  */
3557
3558		  if (GET_CODE (to) == SUBREG
3559		      && ! MODES_TIEABLE_P (GET_MODE (to),
3560					    GET_MODE (SUBREG_REG (to)))
3561		      && ! (code == SUBREG
3562			    && MODES_TIEABLE_P (GET_MODE (x),
3563						GET_MODE (SUBREG_REG (to))))
3564#ifdef HAVE_cc0
3565		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3566#endif
3567		      )
3568		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3569
3570#ifdef CANNOT_CHANGE_MODE_CLASS
3571		  if (code == SUBREG
3572		      && GET_CODE (to) == REG
3573		      && REGNO (to) < FIRST_PSEUDO_REGISTER
3574		      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
3575						   GET_MODE (to),
3576						   GET_MODE (x)))
3577		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3578#endif
3579
3580		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3581		  n_occurrences++;
3582		}
3583	      else
3584		/* If we are in a SET_DEST, suppress most cases unless we
3585		   have gone inside a MEM, in which case we want to
3586		   simplify the address.  We assume here that things that
3587		   are actually part of the destination have their inner
3588		   parts in the first expression.  This is true for SUBREG,
3589		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3590		   things aside from REG and MEM that should appear in a
3591		   SET_DEST.  */
3592		new = subst (XEXP (x, i), from, to,
3593			     (((in_dest
3594				&& (code == SUBREG || code == STRICT_LOW_PART
3595				    || code == ZERO_EXTRACT))
3596			       || code == SET)
3597			      && i == 0), unique_copy);
3598
3599	      /* If we found that we will have to reject this combination,
3600		 indicate that by returning the CLOBBER ourselves, rather than
3601		 an expression containing it.  This will speed things up as
3602		 well as prevent accidents where two CLOBBERs are considered
3603		 to be equal, thus producing an incorrect simplification.  */
3604
3605	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3606		return new;
3607
3608	      if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG)
3609		{
3610		  enum machine_mode mode = GET_MODE (x);
3611
3612		  x = simplify_subreg (GET_MODE (x), new,
3613				       GET_MODE (SUBREG_REG (x)),
3614				       SUBREG_BYTE (x));
3615		  if (! x)
3616		    x = gen_rtx_CLOBBER (mode, const0_rtx);
3617		}
3618	      else if (GET_CODE (new) == CONST_INT
3619		       && GET_CODE (x) == ZERO_EXTEND)
3620		{
3621		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3622						new, GET_MODE (XEXP (x, 0)));
3623		  if (! x)
3624		    abort ();
3625		}
3626	      else
3627		SUBST (XEXP (x, i), new);
3628	    }
3629	}
3630    }
3631
3632  /* Try to simplify X.  If the simplification changed the code, it is likely
3633     that further simplification will help, so loop, but limit the number
3634     of repetitions that will be performed.  */
3635
3636  for (i = 0; i < 4; i++)
3637    {
3638      /* If X is sufficiently simple, don't bother trying to do anything
3639	 with it.  */
3640      if (code != CONST_INT && code != REG && code != CLOBBER)
3641	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3642
3643      if (GET_CODE (x) == code)
3644	break;
3645
3646      code = GET_CODE (x);
3647
3648      /* We no longer know the original mode of operand 0 since we
3649	 have changed the form of X.  */
3650      op0_mode = VOIDmode;
3651    }
3652
3653  return x;
3654}
3655
3656/* Simplify X, a piece of RTL.  We just operate on the expression at the
3657   outer level; call `subst' to simplify recursively.  Return the new
3658   expression.
3659
3660   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3661   will be the last iteration even if an expression with a code different from
3662   X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3663
3664static rtx
3665combine_simplify_rtx (x, op0_mode, last, in_dest)
3666     rtx x;
3667     enum machine_mode op0_mode;
3668     int last;
3669     int in_dest;
3670{
3671  enum rtx_code code = GET_CODE (x);
3672  enum machine_mode mode = GET_MODE (x);
3673  rtx temp;
3674  rtx reversed;
3675  int i;
3676
3677  /* If this is a commutative operation, put a constant last and a complex
3678     expression first.  We don't need to do this for comparisons here.  */
3679  if (GET_RTX_CLASS (code) == 'c'
3680      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
3681    {
3682      temp = XEXP (x, 0);
3683      SUBST (XEXP (x, 0), XEXP (x, 1));
3684      SUBST (XEXP (x, 1), temp);
3685    }
3686
3687  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3688     sign extension of a PLUS with a constant, reverse the order of the sign
3689     extension and the addition.  Note that this is not the same as the original
3690     code, but overflow is undefined for signed values.  Also note that the
3691     PLUS will have been partially moved "inside" the sign-extension, so that
3692     the first operand of X will really look like:
3693         (ashiftrt (plus (ashift A C4) C5) C4).
3694     We convert this to
3695         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3696     and replace the first operand of X with that expression.  Later parts
3697     of this function may simplify the expression further.
3698
3699     For example, if we start with (mult (sign_extend (plus A C1)) C2),
3700     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
3701     distributive law to produce (plus (mult (sign_extend A) C2) C3).
3702
3703     We do this to simplify address expressions.  */
3704
3705  if ((code == PLUS || code == MINUS || code == MULT)
3706      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3707      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3708      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3709      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3710      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3711      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3712      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3713      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3714					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3715					    XEXP (XEXP (x, 0), 1))) != 0)
3716    {
3717      rtx new
3718	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3719				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3720				INTVAL (XEXP (XEXP (x, 0), 1)));
3721
3722      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3723				  INTVAL (XEXP (XEXP (x, 0), 1)));
3724
3725      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3726    }
3727
3728  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3729     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3730     things.  Check for cases where both arms are testing the same
3731     condition.
3732
3733     Don't do anything if all operands are very simple.  */
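	  /* For instance, if X is (plus (if_then_else COND A B) C), the arms
	     simplify as (plus A C) and (plus B C); should those collapse to
	     STORE_FLAG_VALUE and zero, X reduces to a single comparison.  */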
3734
3735  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3736	|| GET_RTX_CLASS (code) == '<')
3737       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3738	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3739		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3740		      == 'o')))
3741	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3742	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3743		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3744			 == 'o')))))
3745      || (GET_RTX_CLASS (code) == '1'
3746	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3747	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3748		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3749			 == 'o'))))))
3750    {
3751      rtx cond, true_rtx, false_rtx;
3752
3753      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
3754      if (cond != 0
3755	  /* If everything is a comparison, what we have is highly unlikely
3756	     to be simpler, so don't use it.  */
3757	  && ! (GET_RTX_CLASS (code) == '<'
3758		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
3759		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
3760	{
3761	  rtx cop1 = const0_rtx;
3762	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3763
3764	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3765	    return x;
3766
3767	  /* Simplify the alternative arms; this may collapse the true and
3768	     false arms to store-flag values.  */
3769	  true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
3770	  false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);
3771
3772	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
3773	     is unlikely to be simpler.  */
3774	  if (general_operand (true_rtx, VOIDmode)
3775	      && general_operand (false_rtx, VOIDmode))
3776	    {
3777	      /* Restarting if we generate a store-flag expression will cause
3778		 us to loop.  Just drop through in this case.  */
3779
3780	      /* If the result values are STORE_FLAG_VALUE and zero, we can
3781		 just make the comparison operation.  */
3782	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
3783		x = gen_binary (cond_code, mode, cond, cop1);
3784	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
3785		       && reverse_condition (cond_code) != UNKNOWN)
3786		x = gen_binary (reverse_condition (cond_code),
3787				mode, cond, cop1);
3788
3789	      /* Likewise, we can make the negate of a comparison operation
3790		 if the result values are - STORE_FLAG_VALUE and zero.  */
3791	      else if (GET_CODE (true_rtx) == CONST_INT
3792		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
3793		       && false_rtx == const0_rtx)
3794		x = simplify_gen_unary (NEG, mode,
3795					gen_binary (cond_code, mode, cond,
3796						    cop1),
3797					mode);
3798	      else if (GET_CODE (false_rtx) == CONST_INT
3799		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
3800		       && true_rtx == const0_rtx)
3801		x = simplify_gen_unary (NEG, mode,
3802					gen_binary (reverse_condition
3803						    (cond_code),
3804						    mode, cond, cop1),
3805					mode);
3806	      else
3807		return gen_rtx_IF_THEN_ELSE (mode,
3808					     gen_binary (cond_code, VOIDmode,
3809							 cond, cop1),
3810					     true_rtx, false_rtx);
3811
3812	      code = GET_CODE (x);
3813	      op0_mode = VOIDmode;
3814	    }
3815	}
3816    }
3817
3818  /* Try to fold this expression in case we have constants that weren't
3819     present before.  */
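	  /* E.g. after substituting a constant for a register, X might be
	     (plus (const_int 2) (const_int 3)), which folds to (const_int 5).  */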
3820  temp = 0;
3821  switch (GET_RTX_CLASS (code))
3822    {
3823    case '1':
3824      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3825      break;
3826    case '<':
3827      {
3828	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
3829	if (cmp_mode == VOIDmode)
3830	  {
3831	    cmp_mode = GET_MODE (XEXP (x, 1));
3832	    if (cmp_mode == VOIDmode)
3833	      cmp_mode = op0_mode;
3834	  }
3835	temp = simplify_relational_operation (code, cmp_mode,
3836					      XEXP (x, 0), XEXP (x, 1));
3837      }
3838#ifdef FLOAT_STORE_FLAG_VALUE
3839      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3840	{
3841	  if (temp == const0_rtx)
3842	    temp = CONST0_RTX (mode);
3843	  else
3844	    temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode),
3845						 mode);
3846	}
3847#endif
3848      break;
3849    case 'c':
3850    case '2':
3851      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3852      break;
3853    case 'b':
3854    case '3':
3855      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3856					 XEXP (x, 1), XEXP (x, 2));
3857      break;
3858    }
3859
3860  if (temp)
3861    {
3862      x = temp;
3863      code = GET_CODE (temp);
3864      op0_mode = VOIDmode;
3865      mode = GET_MODE (temp);
3866    }
3867
3868  /* First see if we can apply the inverse distributive law.  */
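	  /* E.g. (ior (and A C) (and B C)) can become (and (ior A B) C).  */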
3869  if (code == PLUS || code == MINUS
3870      || code == AND || code == IOR || code == XOR)
3871    {
3872      x = apply_distributive_law (x);
3873      code = GET_CODE (x);
3874      op0_mode = VOIDmode;
3875    }
3876
3877  /* If CODE is an associative operation not otherwise handled, see if we
3878     can associate some operands.  This can win if they are constants or
3879     if they are logically related (e.g. (a & b) & a).  */
3880  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
3881       || code == AND || code == IOR || code == XOR
3882       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3883      && ((INTEGRAL_MODE_P (mode) && code != DIV)
3884	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
3885    {
3886      if (GET_CODE (XEXP (x, 0)) == code)
3887	{
3888	  rtx other = XEXP (XEXP (x, 0), 0);
3889	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3890	  rtx inner_op1 = XEXP (x, 1);
3891	  rtx inner;
3892
3893	  /* Make sure we pass the constant operand if any as the second
3894	     one if this is a commutative operation.  */
3895	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3896	    {
3897	      rtx tem = inner_op0;
3898	      inner_op0 = inner_op1;
3899	      inner_op1 = tem;
3900	    }
3901	  inner = simplify_binary_operation (code == MINUS ? PLUS
3902					     : code == DIV ? MULT
3903					     : code,
3904					     mode, inner_op0, inner_op1);
3905
3906	  /* For commutative operations, try the other pair if that one
3907	     didn't simplify.  */
3908	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3909	    {
3910	      other = XEXP (XEXP (x, 0), 1);
3911	      inner = simplify_binary_operation (code, mode,
3912						 XEXP (XEXP (x, 0), 0),
3913						 XEXP (x, 1));
3914	    }
3915
3916	  if (inner)
3917	    return gen_binary (code, mode, other, inner);
3918	}
3919    }
3920
3921  /* A little bit of algebraic simplification here.  */
3922  switch (code)
3923    {
3924    case MEM:
3925      /* Ensure that our address has any ASHIFTs converted to MULT in case
3926	 address-recognizing predicates are called later.  */
3927      temp = make_compound_operation (XEXP (x, 0), MEM);
3928      SUBST (XEXP (x, 0), temp);
3929      break;
3930
3931    case SUBREG:
3932      if (op0_mode == VOIDmode)
3933	op0_mode = GET_MODE (SUBREG_REG (x));
3934
3935      /* simplify_subreg can't use gen_lowpart_for_combine.  */
3936      if (CONSTANT_P (SUBREG_REG (x))
3937	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
3938	     /* Don't call gen_lowpart_for_combine if the inner mode
3939		is VOIDmode and we cannot simplify it, as SUBREG without
3940		inner mode is invalid.  */
3941	  && (GET_MODE (SUBREG_REG (x)) != VOIDmode
3942	      || gen_lowpart_common (mode, SUBREG_REG (x))))
3943	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3944
3945      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
3946        break;
3947      {
3948	rtx temp;
3949	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
3950				SUBREG_BYTE (x));
3951	if (temp)
3952	  return temp;
3953      }
3954
3955      /* Don't change the mode of the MEM if that would change the meaning
3956	 of the address.  */
3957      if (GET_CODE (SUBREG_REG (x)) == MEM
3958	  && (MEM_VOLATILE_P (SUBREG_REG (x))
3959	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
3960	return gen_rtx_CLOBBER (mode, const0_rtx);
3961
3962      /* Note that we cannot do any narrowing for non-constants since
3963	 we might have been counting on using the fact that some bits were
3964	 zero.  We now do this in the SET.  */
3965
3966      break;
3967
3968    case NOT:
3969      /* (not (plus X -1)) can become (neg X).  */
3970      if (GET_CODE (XEXP (x, 0)) == PLUS
3971	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3972	return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));
3973
3974      /* Similarly, (not (neg X)) is (plus X -1).  */
3975      if (GET_CODE (XEXP (x, 0)) == NEG)
3976	return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3977
3978      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
3979      if (GET_CODE (XEXP (x, 0)) == XOR
3980	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3981	  && (temp = simplify_unary_operation (NOT, mode,
3982					       XEXP (XEXP (x, 0), 1),
3983					       mode)) != 0)
3984	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3985
3986      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3987	 other than 1, but that is not valid.  We could do a similar
3988	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3989	 but this doesn't seem common enough to bother with.  */
3990      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3991	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3992	return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
3993							 const1_rtx, mode),
3994			       XEXP (XEXP (x, 0), 1));
3995
3996      if (GET_CODE (XEXP (x, 0)) == SUBREG
3997	  && subreg_lowpart_p (XEXP (x, 0))
3998	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3999	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
4000	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
4001	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
4002	{
4003	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
4004
4005	  x = gen_rtx_ROTATE (inner_mode,
4006			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
4007						  inner_mode),
4008			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
4009	  return gen_lowpart_for_combine (mode, x);
4010	}
4011
4012      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
4013	 reversing the comparison code if valid.  */
4014      if (STORE_FLAG_VALUE == -1
4015	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4016	  && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
4017					      XEXP (XEXP (x, 0), 1))))
4018	return reversed;
4019
4020      /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
4021	 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
4022	 perform the above simplification.  */
4023
4024      if (STORE_FLAG_VALUE == -1
4025	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
4026	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4027	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4028	return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4029
4030      /* Apply De Morgan's laws to reduce number of patterns for machines
4031	 with negating logical insns (and-not, nand, etc.).  If result has
4032	 only one NOT, put it first, since that is how the patterns are
4033	 coded.  */
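	      /* E.g. (not (ior A B)) becomes (and (not A) (not B)), and
		 (not (and A B)) becomes (ior (not A) (not B)).  */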
4034
4035      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
4036	{
4037	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
4038	  enum machine_mode op_mode;
4039
4040	  op_mode = GET_MODE (in1);
4041	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);
4042
4043	  op_mode = GET_MODE (in2);
4044	  if (op_mode == VOIDmode)
4045	    op_mode = mode;
4046	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);
4047
4048	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
4049	    {
4050	      rtx tem = in2;
4051	      in2 = in1; in1 = tem;
4052	    }
4053
4054	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
4055				 mode, in1, in2);
4056	}
4057      break;
4058
4059    case NEG:
4060      /* (neg (plus X 1)) can become (not X).  */
4061      if (GET_CODE (XEXP (x, 0)) == PLUS
4062	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
4063	return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));
4064
4065      /* Similarly, (neg (not X)) is (plus X 1).  */
4066      if (GET_CODE (XEXP (x, 0)) == NOT)
4067	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
4068
4069      /* (neg (minus X Y)) can become (minus Y X).  This transformation
4070	 isn't safe for modes with signed zeros, since if X and Y are
4071	 both +0, (minus Y X) is the same as (minus X Y).  If the rounding
4072	 mode is towards +infinity (or -infinity) then the two expressions
4073	 will be rounded differently.  */
4074      if (GET_CODE (XEXP (x, 0)) == MINUS
4075	  && !HONOR_SIGNED_ZEROS (mode)
4076	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
4077	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
4078			   XEXP (XEXP (x, 0), 0));
4079
4080      /* (neg (plus A B)) is canonicalized to (minus (neg A) B).  */
4081      if (GET_CODE (XEXP (x, 0)) == PLUS
4082	  && !HONOR_SIGNED_ZEROS (mode)
4083	  && !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
4084	{
4085	  temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode);
4086	  temp = combine_simplify_rtx (temp, mode, last, in_dest);
4087	  return gen_binary (MINUS, mode, temp, XEXP (XEXP (x, 0), 1));
4088	}
4089
4090      /* (neg (mult A B)) becomes (mult (neg A) B).
4091         This works even for floating-point values.  */
4092      if (GET_CODE (XEXP (x, 0)) == MULT)
4093	{
4094	  temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode);
4095	  return gen_binary (MULT, mode, temp, XEXP (XEXP (x, 0), 1));
4096	}
4097
4098      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
4099      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
4100	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
4101	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
4102
4103      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
4104	 if we can then eliminate the NEG (e.g.,
4105	 if the operand is a constant).  */
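	      /* E.g. (neg (ashift (const_int 2) N)) folds the NEG into the
		 constant, giving (ashift (const_int -2) N).  */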
4106
4107      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4108	{
4109	  temp = simplify_unary_operation (NEG, mode,
4110					   XEXP (XEXP (x, 0), 0), mode);
4111	  if (temp)
4112	    return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
4113	}
4114
4115      temp = expand_compound_operation (XEXP (x, 0));
4116
4117      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
4118	 replaced by (lshiftrt X C).  This will convert
4119	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
4120
4121      if (GET_CODE (temp) == ASHIFTRT
4122	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
4123	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
4124	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
4125				     INTVAL (XEXP (temp, 1)));
4126
4127      /* If X has only a single bit that might be nonzero, say, bit I, convert
4128	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
4129	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
4130	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
4131	 or a SUBREG of one since we'd be making the expression more
4132	 complex if it was just a register.  */
4133
4134      if (GET_CODE (temp) != REG
4135	  && ! (GET_CODE (temp) == SUBREG
4136		&& GET_CODE (SUBREG_REG (temp)) == REG)
4137	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
4138	{
4139	  rtx temp1 = simplify_shift_const
4140	    (NULL_RTX, ASHIFTRT, mode,
4141	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
4142				   GET_MODE_BITSIZE (mode) - 1 - i),
4143	     GET_MODE_BITSIZE (mode) - 1 - i);
4144
4145	  /* If all we did was surround TEMP with the two shifts, we
4146	     haven't improved anything, so don't use it.  Otherwise,
4147	     we are better off with TEMP1.  */
4148	  if (GET_CODE (temp1) != ASHIFTRT
4149	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
4150	      || XEXP (XEXP (temp1, 0), 0) != temp)
4151	    return temp1;
4152	}
4153      break;
4154
4155    case TRUNCATE:
4156      /* We can't handle truncation to a partial integer mode here
4157	 because we don't know the real bitsize of the partial
4158	 integer mode.  */
4159      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
4160	break;
4161
4162      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4163	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4164				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4165	SUBST (XEXP (x, 0),
4166	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
4167			      GET_MODE_MASK (mode), NULL_RTX, 0));
4168
4169      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
4170      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4171	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4172	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4173	return XEXP (XEXP (x, 0), 0);
4174
4175      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
4176	 (OP:SI foo:SI) if OP is NEG or ABS.  */
4177      if ((GET_CODE (XEXP (x, 0)) == ABS
4178	   || GET_CODE (XEXP (x, 0)) == NEG)
4179	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
4180	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
4181	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4182	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4183				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4184
4185      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
4186	 (truncate:SI x).  */
4187      if (GET_CODE (XEXP (x, 0)) == SUBREG
4188	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
4189	  && subreg_lowpart_p (XEXP (x, 0)))
4190	return SUBREG_REG (XEXP (x, 0));
4191
4192      /* If we know that the value is already truncated, we can
4193         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
4194         is nonzero for the corresponding modes.  But don't do this
4195         for an (LSHIFTRT (MULT ...)) since this will cause problems
4196         with the umulXi3_highpart patterns.  */
4197      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
4198				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4199	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4200	     >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
4201	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4202		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
4203	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4204
4205      /* A truncate of a comparison can be replaced with a subreg if
4206         STORE_FLAG_VALUE permits.  This is like the previous test,
4207         but it works even if the comparison is done in a mode larger
4208         than HOST_BITS_PER_WIDE_INT.  */
4209      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4210	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4211	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
4212	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4213
4214      /* Similarly, a truncate of a register whose value is a
4215         comparison can be replaced with a subreg if STORE_FLAG_VALUE
4216         permits.  */
4217      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4218	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
4219	  && (temp = get_last_value (XEXP (x, 0)))
4220	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4221	return gen_lowpart_for_combine (mode, XEXP (x, 0));
4222
4223      break;
4224
4225    case FLOAT_TRUNCATE:
4226      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
4227      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4228	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4229	return XEXP (XEXP (x, 0), 0);
4230
4231      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4232	 (OP:SF foo:SF) if OP is NEG or ABS.  */
4233      if ((GET_CODE (XEXP (x, 0)) == ABS
4234	   || GET_CODE (XEXP (x, 0)) == NEG)
4235	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4236	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
4237	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
4238				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);
4239
4240      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4241	 is (float_truncate:SF x).  */
4242      if (GET_CODE (XEXP (x, 0)) == SUBREG
4243	  && subreg_lowpart_p (XEXP (x, 0))
4244	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4245	return SUBREG_REG (XEXP (x, 0));
4246      break;
4247
4248#ifdef HAVE_cc0
4249    case COMPARE:
4250      /* Convert (compare FOO (const_int 0)) to FOO.  This is done only
4251	 when using cc0; otherwise we would want to keep the COMPARE
4252	 so we could distinguish it from a register-register copy.  */
4253      if (XEXP (x, 1) == const0_rtx)
4254	return XEXP (x, 0);
4255
4256      /* x - 0 is the same as x unless x's mode has signed zeros and
4257	 allows rounding towards -infinity.  Under those conditions,
4258	 0 - 0 is -0.  */
4259      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
4260	    && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
4261	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4262	return XEXP (x, 0);
4263      break;
4264#endif
4265
4266    case CONST:
4267      /* (const (const X)) can become (const X).  Do it this way rather than
4268	 returning the inner CONST since CONST can be shared with a
4269	 REG_EQUAL note.  */
4270      if (GET_CODE (XEXP (x, 0)) == CONST)
4271	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4272      break;
4273
4274#ifdef HAVE_lo_sum
4275    case LO_SUM:
4276      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
4277	 can add in an offset.  find_split_point will split this address up
4278	 again if it doesn't match.  */
4279      if (GET_CODE (XEXP (x, 0)) == HIGH
4280	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4281	return XEXP (x, 1);
4282      break;
4283#endif
4284
4285    case PLUS:
4286      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).
4287       */
4288      if (GET_CODE (XEXP (x, 0)) == MULT
4289	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
4290	{
4291	  rtx in1, in2;
4292
4293	  in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
4294	  in2 = XEXP (XEXP (x, 0), 1);
4295	  return gen_binary (MINUS, mode, XEXP (x, 1),
4296			     gen_binary (MULT, mode, in1, in2));
4297	}
4298
4299      /* If we have (plus (plus A const) B), associate it so that CONST is
4300	 outermost.  That's because that's the way indexed addresses are
4301	 supposed to appear.  This code used to check many more cases, but
4302	 they are now checked elsewhere.  */
4303      if (GET_CODE (XEXP (x, 0)) == PLUS
4304	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4305	return gen_binary (PLUS, mode,
4306			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4307				       XEXP (x, 1)),
4308			   XEXP (XEXP (x, 0), 1));
4309
4310      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4311	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
4312	 bit-field and can be replaced by either a sign_extend or a
4313	 sign_extract.  The `and' may be a zero_extend and the two
4314	 <c>, -<c> constants may be reversed.  */
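	      /* Worked example: in SImode, (plus (xor (and X 255) 128) -128)
		 sign-extends the low byte of X; here i == 7, so the result is
		 (ashiftrt (ashift X 24) 24).  */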
4315      if (GET_CODE (XEXP (x, 0)) == XOR
4316	  && GET_CODE (XEXP (x, 1)) == CONST_INT
4317	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4318	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4319	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4320	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4321	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4322	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4323	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4324	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4325		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4326	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4327		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4328		      == (unsigned int) i + 1))))
4329	return simplify_shift_const
4330	  (NULL_RTX, ASHIFTRT, mode,
4331	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4332				 XEXP (XEXP (XEXP (x, 0), 0), 0),
4333				 GET_MODE_BITSIZE (mode) - (i + 1)),
4334	   GET_MODE_BITSIZE (mode) - (i + 1));
4335
4336      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4337	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4338	 is 1.  This produces better code than the alternative immediately
4339	 below.  */
4340      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4341	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4342	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
4343	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
4344					      XEXP (XEXP (x, 0), 0),
4345					      XEXP (XEXP (x, 0), 1))))
4346	return
4347	  simplify_gen_unary (NEG, mode, reversed, mode);
4348
4349      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4350	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4351	 the bitsize of the mode - 1.  This allows simplification of
4352	 "a = (b & 8) == 0;"  */
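      /* Worked example (illustrative, assuming 32-bit SImode): if X is
	 known to be 0 or 1, (plus X -1) is 0 or -1 respectively, and
	 (ashiftrt (ashift (xor X 1) 31) 31) computes the same values:
	 X == 1 gives (ashift 0 31) == 0, while X == 0 gives
	 (ashiftrt (ashift 1 31) 31) == -1.  */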
4353      if (XEXP (x, 1) == constm1_rtx
4354	  && GET_CODE (XEXP (x, 0)) != REG
4355	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4356		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
4357	  && nonzero_bits (XEXP (x, 0), mode) == 1)
4358	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4359	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
4360				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4361				 GET_MODE_BITSIZE (mode) - 1),
4362	   GET_MODE_BITSIZE (mode) - 1);
4363
4364      /* If we are adding two things that have no bits in common, convert
4365	 the addition into an IOR.  This will often be further simplified,
4366	 for example in cases like ((a & 1) + (a & 2)), which can
4367	 become a & 3.  */
4368
4369      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4370	  && (nonzero_bits (XEXP (x, 0), mode)
4371	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
4372	{
4373	  /* Try to simplify the expression further.  */
4374	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4375	  temp = combine_simplify_rtx (tor, mode, last, in_dest);
4376
4377	  /* If we could, great.  If not, do not go ahead with the IOR
4378	     replacement, since PLUS appears in many special purpose
4379	     address arithmetic instructions.  */
4380	  if (GET_CODE (temp) != CLOBBER && temp != tor)
4381	    return temp;
4382	}
4383      break;
4384
4385    case MINUS:
4386      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4387	 by reversing the comparison code if valid.  */
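      /* Worked example (illustrative): with STORE_FLAG_VALUE == 1,
	 (ltu A B) is 0 or 1, so (minus 1 (ltu A B)) is just the reversed
	 comparison (geu A B).  */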
4388      if (STORE_FLAG_VALUE == 1
4389	  && XEXP (x, 0) == const1_rtx
4390	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4391	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
4392					      XEXP (XEXP (x, 1), 0),
4393					      XEXP (XEXP (x, 1), 1))))
4394	return reversed;
4395
4396      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4397	 (and <foo> (const_int pow2-1))  */
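      /* Worked example (illustrative): with pow2 == 8,
	 (minus X (and X -8)) subtracts X with its low three bits cleared,
	 leaving exactly those bits, i.e. (and X 7).  */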
4398      if (GET_CODE (XEXP (x, 1)) == AND
4399	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4400	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4401	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4402	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4403				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4404
4405      /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
4407      if (GET_CODE (XEXP (x, 1)) == MULT
4408	  && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
4409	{
4410	  rtx in1, in2;
4411
4412	  in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
4413	  in2 = XEXP (XEXP (x, 1), 1);
4414	  return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2),
4415			     XEXP (x, 0));
4416	}
4417
4418      /* Canonicalize (minus (neg A) (mult B C)) to
4419	 (minus (mult (neg B) C) A).  */
4420      if (GET_CODE (XEXP (x, 1)) == MULT
4421	  && GET_CODE (XEXP (x, 0)) == NEG)
4422	{
4423	  rtx in1, in2;
4424
4425	  in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
4426	  in2 = XEXP (XEXP (x, 1), 1);
4427	  return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2),
4428			     XEXP (XEXP (x, 0), 0));
4429	}
4430
4431      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4432	 integers.  */
4433      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4434	return gen_binary (MINUS, mode,
4435			   gen_binary (MINUS, mode, XEXP (x, 0),
4436				       XEXP (XEXP (x, 1), 0)),
4437			   XEXP (XEXP (x, 1), 1));
4438      break;
4439
4440    case MULT:
4441      /* If we have (mult (plus A B) C), apply the distributive law and then
4442	 the inverse distributive law to see if things simplify.  This
4443	 occurs mostly in addresses, often when unrolling loops.  */
4444
4445      if (GET_CODE (XEXP (x, 0)) == PLUS)
4446	{
4447	  x = apply_distributive_law
4448	    (gen_binary (PLUS, mode,
4449			 gen_binary (MULT, mode,
4450				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4451			 gen_binary (MULT, mode,
4452				     XEXP (XEXP (x, 0), 1),
4453				     copy_rtx (XEXP (x, 1)))));
4454
4455	  if (GET_CODE (x) != MULT)
4456	    return x;
4457	}
4458      /* Try to simplify a*(b/c) as (a*b)/c.  */
4459      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
4460	  && GET_CODE (XEXP (x, 0)) == DIV)
4461	{
4462	  rtx tem = simplify_binary_operation (MULT, mode,
4463					       XEXP (XEXP (x, 0), 0),
4464					       XEXP (x, 1));
4465	  if (tem)
4466	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4467	}
4468      break;
4469
4470    case UDIV:
4471      /* If this is a divide by a power of two, treat it as a shift if
4472	 its first operand is a shift.  */
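      /* Worked example (illustrative): (udiv (lshiftrt X 2) (const_int 4))
	 has i == 2 and becomes (lshiftrt (lshiftrt X 2) 2), which
	 simplify_shift_const folds to (lshiftrt X 4).  */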
4473      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4474	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4475	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4476	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4477	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4478	      || GET_CODE (XEXP (x, 0)) == ROTATE
4479	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4480	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4481      break;
4482
4483    case EQ:  case NE:
4484    case GT:  case GTU:  case GE:  case GEU:
4485    case LT:  case LTU:  case LE:  case LEU:
4486    case UNEQ:  case LTGT:
4487    case UNGT:  case UNGE:
4488    case UNLT:  case UNLE:
4489    case UNORDERED: case ORDERED:
4490      /* If the first operand is a condition code, we can't do anything
4491	 with it.  */
4492      if (GET_CODE (XEXP (x, 0)) == COMPARE
4493	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4494#ifdef HAVE_cc0
4495	      && XEXP (x, 0) != cc0_rtx
4496#endif
4497	      ))
4498	{
4499	  rtx op0 = XEXP (x, 0);
4500	  rtx op1 = XEXP (x, 1);
4501	  enum rtx_code new_code;
4502
4503	  if (GET_CODE (op0) == COMPARE)
4504	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4505
4506	  /* Simplify our comparison, if possible.  */
4507	  new_code = simplify_comparison (code, &op0, &op1);
4508
4509	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4510	     if only the low-order bit is possibly nonzero in X (such as when
4511	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4512	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4513	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4514	     (plus X 1).
4515
4516	     Remove any ZERO_EXTRACT we made when thinking this was a
4517	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4518	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4519	     the call to make_compound_operation in the SET case.  */
4520
4521	  if (STORE_FLAG_VALUE == 1
4522	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4523	      && op1 == const0_rtx
4524	      && mode == GET_MODE (op0)
4525	      && nonzero_bits (op0, mode) == 1)
4526	    return gen_lowpart_for_combine (mode,
4527					    expand_compound_operation (op0));
4528
4529	  else if (STORE_FLAG_VALUE == 1
4530		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4531		   && op1 == const0_rtx
4532		   && mode == GET_MODE (op0)
4533		   && (num_sign_bit_copies (op0, mode)
4534		       == GET_MODE_BITSIZE (mode)))
4535	    {
4536	      op0 = expand_compound_operation (op0);
4537	      return simplify_gen_unary (NEG, mode,
4538					 gen_lowpart_for_combine (mode, op0),
4539					 mode);
4540	    }
4541
4542	  else if (STORE_FLAG_VALUE == 1
4543		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4544		   && op1 == const0_rtx
4545		   && mode == GET_MODE (op0)
4546		   && nonzero_bits (op0, mode) == 1)
4547	    {
4548	      op0 = expand_compound_operation (op0);
4549	      return gen_binary (XOR, mode,
4550				 gen_lowpart_for_combine (mode, op0),
4551				 const1_rtx);
4552	    }
4553
4554	  else if (STORE_FLAG_VALUE == 1
4555		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4556		   && op1 == const0_rtx
4557		   && mode == GET_MODE (op0)
4558		   && (num_sign_bit_copies (op0, mode)
4559		       == GET_MODE_BITSIZE (mode)))
4560	    {
4561	      op0 = expand_compound_operation (op0);
4562	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4563	    }
4564
4565	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4566	     those above.  */
4567	  if (STORE_FLAG_VALUE == -1
4568	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4569	      && op1 == const0_rtx
4570	      && (num_sign_bit_copies (op0, mode)
4571		  == GET_MODE_BITSIZE (mode)))
4572	    return gen_lowpart_for_combine (mode,
4573					    expand_compound_operation (op0));
4574
4575	  else if (STORE_FLAG_VALUE == -1
4576		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4577		   && op1 == const0_rtx
4578		   && mode == GET_MODE (op0)
4579		   && nonzero_bits (op0, mode) == 1)
4580	    {
4581	      op0 = expand_compound_operation (op0);
4582	      return simplify_gen_unary (NEG, mode,
4583					 gen_lowpart_for_combine (mode, op0),
4584					 mode);
4585	    }
4586
4587	  else if (STORE_FLAG_VALUE == -1
4588		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4589		   && op1 == const0_rtx
4590		   && mode == GET_MODE (op0)
4591		   && (num_sign_bit_copies (op0, mode)
4592		       == GET_MODE_BITSIZE (mode)))
4593	    {
4594	      op0 = expand_compound_operation (op0);
4595	      return simplify_gen_unary (NOT, mode,
4596					 gen_lowpart_for_combine (mode, op0),
4597					 mode);
4598	    }
4599
4600	  /* If X is 0/1, (eq X 0) is X-1.  */
4601	  else if (STORE_FLAG_VALUE == -1
4602		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4603		   && op1 == const0_rtx
4604		   && mode == GET_MODE (op0)
4605		   && nonzero_bits (op0, mode) == 1)
4606	    {
4607	      op0 = expand_compound_operation (op0);
4608	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4609	    }
4610
4611	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4612	     one bit that might be nonzero, we can convert (ne x 0) to
4613	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4614	     AND with STORE_FLAG_VALUE when we are done, since we are only
4615	     going to test the sign bit.  */
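	  /* Worked example (illustrative): on a hypothetical 32-bit target
	     with STORE_FLAG_VALUE == 0x80000000, if only bit 3 of X can be
	     nonzero then i == 3 and (ne X 0) becomes (ashift X 28), which
	     moves that bit into the sign bit.  */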
4616	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4617	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4618	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4619		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
4620	      && op1 == const0_rtx
4621	      && mode == GET_MODE (op0)
4622	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4623	    {
4624	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4625					expand_compound_operation (op0),
4626					GET_MODE_BITSIZE (mode) - 1 - i);
4627	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4628		return XEXP (x, 0);
4629	      else
4630		return x;
4631	    }
4632
4633	  /* If the code changed, return a whole new comparison.  */
4634	  if (new_code != code)
4635	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4636
4637	  /* Otherwise, keep this operation, but maybe change its operands.
4638	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4639	  SUBST (XEXP (x, 0), op0);
4640	  SUBST (XEXP (x, 1), op1);
4641	}
4642      break;
4643
4644    case IF_THEN_ELSE:
4645      return simplify_if_then_else (x);
4646
4647    case ZERO_EXTRACT:
4648    case SIGN_EXTRACT:
4649    case ZERO_EXTEND:
4650    case SIGN_EXTEND:
4651      /* If we are processing SET_DEST, we are done.  */
4652      if (in_dest)
4653	return x;
4654
4655      return expand_compound_operation (x);
4656
4657    case SET:
4658      return simplify_set (x);
4659
4660    case AND:
4661    case IOR:
4662    case XOR:
4663      return simplify_logical (x, last);
4664
4665    case ABS:
4666      /* (abs (neg <foo>)) -> (abs <foo>) */
4667      if (GET_CODE (XEXP (x, 0)) == NEG)
4668	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4669
4670      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4671         do nothing.  */
4672      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4673	break;
4674
4675      /* If operand is something known to be positive, ignore the ABS.  */
4676      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4677	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4678	       <= HOST_BITS_PER_WIDE_INT)
4679	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4680		   & ((HOST_WIDE_INT) 1
4681		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4682		  == 0)))
4683	return XEXP (x, 0);
4684
4685      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4686      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4687	return gen_rtx_NEG (mode, XEXP (x, 0));
4688
4689      break;
4690
4691    case FFS:
4692      /* (ffs (*_extend <X>)) = (ffs <X>) */
4693      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4694	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4695	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4696      break;
4697
4698    case FLOAT:
4699      /* (float (sign_extend <X>)) = (float <X>).  */
4700      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4701	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4702      break;
4703
4704    case ASHIFT:
4705    case LSHIFTRT:
4706    case ASHIFTRT:
4707    case ROTATE:
4708    case ROTATERT:
4709      /* If this is a shift by a constant amount, simplify it.  */
4710      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4711	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4712				     INTVAL (XEXP (x, 1)));
4713
4714#ifdef SHIFT_COUNT_TRUNCATED
4715      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4716	SUBST (XEXP (x, 1),
4717	       force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4718			      ((HOST_WIDE_INT) 1
4719			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4720			      - 1,
4721			      NULL_RTX, 0));
4722#endif
4723
4724      break;
4725
4726    case VEC_SELECT:
4727      {
4728	rtx op0 = XEXP (x, 0);
4729	rtx op1 = XEXP (x, 1);
4730	int len;
4731
4732	if (GET_CODE (op1) != PARALLEL)
4733	  abort ();
4734	len = XVECLEN (op1, 0);
4735	if (len == 1
4736	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
4737	    && GET_CODE (op0) == VEC_CONCAT)
4738	  {
4739	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));
4740
4741	    /* Try to find the element in the VEC_CONCAT.  */
4742	    for (;;)
4743	      {
4744		if (GET_MODE (op0) == GET_MODE (x))
4745		  return op0;
4746		if (GET_CODE (op0) == VEC_CONCAT)
4747		  {
4748		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
4749		    if (op0_size < offset)
4750		      op0 = XEXP (op0, 0);
4751		    else
4752		      {
4753			offset -= op0_size;
4754			op0 = XEXP (op0, 1);
4755		      }
4756		  }
4757		else
4758		  break;
4759	      }
4760	  }
4761      }
4762
4763      break;
4764
4765    default:
4766      break;
4767    }
4768
4769  return x;
4770}
4771
4772/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4773
4774static rtx
4775simplify_if_then_else (x)
4776     rtx x;
4777{
4778  enum machine_mode mode = GET_MODE (x);
4779  rtx cond = XEXP (x, 0);
4780  rtx true_rtx = XEXP (x, 1);
4781  rtx false_rtx = XEXP (x, 2);
4782  enum rtx_code true_code = GET_CODE (cond);
4783  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4784  rtx temp;
4785  int i;
4786  enum rtx_code false_code;
4787  rtx reversed;
4788
4789  /* Simplify storing of the truth value.  */
4790  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4791    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4792
4793  /* Also when the truth value has to be reversed.  */
4794  if (comparison_p
4795      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4796      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
4797					  XEXP (cond, 1))))
4798    return reversed;
4799
4800  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4801     in it is being compared against certain values.  Get the true and false
4802     comparisons and see if that says anything about the value of each arm.  */
4803
4804  if (comparison_p
4805      && ((false_code = combine_reversed_comparison_code (cond))
4806	  != UNKNOWN)
4807      && GET_CODE (XEXP (cond, 0)) == REG)
4808    {
4809      HOST_WIDE_INT nzb;
4810      rtx from = XEXP (cond, 0);
4811      rtx true_val = XEXP (cond, 1);
4812      rtx false_val = true_val;
4813      int swapped = 0;
4814
4815      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4816
4817      if (false_code == EQ)
4818	{
4819	  swapped = 1, true_code = EQ, false_code = NE;
4820	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4821	}
4822
4823      /* If we are comparing against zero and the expression being tested has
4824	 only a single bit that might be nonzero, that is its value when it is
4825	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4826
4827      if (true_code == EQ && true_val == const0_rtx
4828	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4829	false_code = EQ, false_val = GEN_INT (nzb);
4830      else if (true_code == EQ && true_val == const0_rtx
4831	       && (num_sign_bit_copies (from, GET_MODE (from))
4832		   == GET_MODE_BITSIZE (GET_MODE (from))))
4833	false_code = EQ, false_val = constm1_rtx;
4834
4835      /* Now simplify an arm if we know the value of the register in the
4836	 branch and it is used in the arm.  Be careful due to the potential
4837	 of locally-shared RTL.  */
4838
4839      if (reg_mentioned_p (from, true_rtx))
4840	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4841				      from, true_val),
4842		      pc_rtx, pc_rtx, 0, 0);
4843      if (reg_mentioned_p (from, false_rtx))
4844	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4845				   from, false_val),
4846		       pc_rtx, pc_rtx, 0, 0);
4847
4848      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4849      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4850
4851      true_rtx = XEXP (x, 1);
4852      false_rtx = XEXP (x, 2);
4853      true_code = GET_CODE (cond);
4854    }
4855
4856  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4857     reversed, do so to avoid needing two sets of patterns for
4858     subtract-and-branch insns.  Similarly if we have a constant in the true
4859     arm, the false arm is the same as the first operand of the comparison, or
4860     the false arm is more complicated than the true arm.  */
4861
4862  if (comparison_p
4863      && combine_reversed_comparison_code (cond) != UNKNOWN
4864      && (true_rtx == pc_rtx
4865	  || (CONSTANT_P (true_rtx)
4866	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4867	  || true_rtx == const0_rtx
4868	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
4869	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4870	  || (GET_CODE (true_rtx) == SUBREG
4871	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
4872	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
4873	  || reg_mentioned_p (true_rtx, false_rtx)
4874	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4875    {
4876      true_code = reversed_comparison_code (cond, NULL);
4877      SUBST (XEXP (x, 0),
4878	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
4879				  XEXP (cond, 1)));
4880
4881      SUBST (XEXP (x, 1), false_rtx);
4882      SUBST (XEXP (x, 2), true_rtx);
4883
4884      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4885      cond = XEXP (x, 0);
4886
4887      /* It is possible that the conditional has been simplified out.  */
4888      true_code = GET_CODE (cond);
4889      comparison_p = GET_RTX_CLASS (true_code) == '<';
4890    }
4891
4892  /* If the two arms are identical, we don't need the comparison.  */
4893
4894  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4895    return true_rtx;
4896
4897  /* Convert a == b ? b : a to "a".  */
4898  if (true_code == EQ && ! side_effects_p (cond)
4899      && !HONOR_NANS (mode)
4900      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4901      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4902    return false_rtx;
4903  else if (true_code == NE && ! side_effects_p (cond)
4904	   && !HONOR_NANS (mode)
4905	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
4906	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
4907    return true_rtx;
4908
4909  /* Look for cases where we have (abs x) or (neg (abs X)).  */
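  /* Worked example (illustrative): (if_then_else (ge X 0) X (neg X))
     becomes (abs X), while with LT it becomes (neg (abs X)).  */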
4910
4911  if (GET_MODE_CLASS (mode) == MODE_INT
4912      && GET_CODE (false_rtx) == NEG
4913      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4914      && comparison_p
4915      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4916      && ! side_effects_p (true_rtx))
4917    switch (true_code)
4918      {
4919      case GT:
4920      case GE:
4921	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4922      case LT:
4923      case LE:
4924	return
4925	  simplify_gen_unary (NEG, mode,
4926			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4927			      mode);
4928      default:
4929	break;
4930      }
4931
4932  /* Look for MIN or MAX.  */
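  /* Worked example (illustrative): (if_then_else (gt A B) A B) becomes
     (smax A B), and (if_then_else (ltu A B) A B) becomes (umin A B).  */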
4933
4934  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4935      && comparison_p
4936      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4937      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4938      && ! side_effects_p (cond))
4939    switch (true_code)
4940      {
4941      case GE:
4942      case GT:
4943	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4944      case LE:
4945      case LT:
4946	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4947      case GEU:
4948      case GTU:
4949	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4950      case LEU:
4951      case LTU:
4952	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4953      default:
4954	break;
4955      }
4956
4957  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4958     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4959     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4960     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4961     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4962     neither 1 nor -1, but it isn't worth checking for.  */
4963
4964  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4965      && comparison_p
4966      && GET_MODE_CLASS (mode) == MODE_INT
4967      && ! side_effects_p (x))
4968    {
4969      rtx t = make_compound_operation (true_rtx, SET);
4970      rtx f = make_compound_operation (false_rtx, SET);
4971      rtx cond_op0 = XEXP (cond, 0);
4972      rtx cond_op1 = XEXP (cond, 1);
4973      enum rtx_code op = NIL, extend_op = NIL;
4974      enum machine_mode m = mode;
4975      rtx z = 0, c1 = NULL_RTX;
4976
4977      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4978	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4979	   || GET_CODE (t) == ASHIFT
4980	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4981	  && rtx_equal_p (XEXP (t, 0), f))
4982	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4983
4984      /* If an identity-zero op is commutative, check whether there
4985	 would be a match if we swapped the operands.  */
4986      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4987		|| GET_CODE (t) == XOR)
4988	       && rtx_equal_p (XEXP (t, 1), f))
4989	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4990      else if (GET_CODE (t) == SIGN_EXTEND
4991	       && (GET_CODE (XEXP (t, 0)) == PLUS
4992		   || GET_CODE (XEXP (t, 0)) == MINUS
4993		   || GET_CODE (XEXP (t, 0)) == IOR
4994		   || GET_CODE (XEXP (t, 0)) == XOR
4995		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4996		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4997		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4998	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4999	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5000	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5001	       && (num_sign_bit_copies (f, GET_MODE (f))
5002		   > (unsigned int)
5003		     (GET_MODE_BITSIZE (mode)
5004		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5005	{
5006	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5007	  extend_op = SIGN_EXTEND;
5008	  m = GET_MODE (XEXP (t, 0));
5009	}
5010      else if (GET_CODE (t) == SIGN_EXTEND
5011	       && (GET_CODE (XEXP (t, 0)) == PLUS
5012		   || GET_CODE (XEXP (t, 0)) == IOR
5013		   || GET_CODE (XEXP (t, 0)) == XOR)
5014	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5015	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5016	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5017	       && (num_sign_bit_copies (f, GET_MODE (f))
5018		   > (unsigned int)
5019		     (GET_MODE_BITSIZE (mode)
5020		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5021	{
5022	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5023	  extend_op = SIGN_EXTEND;
5024	  m = GET_MODE (XEXP (t, 0));
5025	}
5026      else if (GET_CODE (t) == ZERO_EXTEND
5027	       && (GET_CODE (XEXP (t, 0)) == PLUS
5028		   || GET_CODE (XEXP (t, 0)) == MINUS
5029		   || GET_CODE (XEXP (t, 0)) == IOR
5030		   || GET_CODE (XEXP (t, 0)) == XOR
5031		   || GET_CODE (XEXP (t, 0)) == ASHIFT
5032		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5033		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5034	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5035	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5036	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5037	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5038	       && ((nonzero_bits (f, GET_MODE (f))
5039		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
5040		   == 0))
5041	{
5042	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5043	  extend_op = ZERO_EXTEND;
5044	  m = GET_MODE (XEXP (t, 0));
5045	}
5046      else if (GET_CODE (t) == ZERO_EXTEND
5047	       && (GET_CODE (XEXP (t, 0)) == PLUS
5048		   || GET_CODE (XEXP (t, 0)) == IOR
5049		   || GET_CODE (XEXP (t, 0)) == XOR)
5050	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5051	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5052	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5053	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5054	       && ((nonzero_bits (f, GET_MODE (f))
5055		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
5056		   == 0))
5057	{
5058	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5059	  extend_op = ZERO_EXTEND;
5060	  m = GET_MODE (XEXP (t, 0));
5061	}
5062
5063      if (z)
5064	{
5065	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
5066			pc_rtx, pc_rtx, 0, 0);
5067	  temp = gen_binary (MULT, m, temp,
5068			     gen_binary (MULT, m, c1, const_true_rtx));
5069	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
5070	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
5071
5072	  if (extend_op != NIL)
5073	    temp = simplify_gen_unary (extend_op, mode, temp, m);
5074
5075	  return temp;
5076	}
5077    }
5078
5079  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5080     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5081     negation of a single bit, we can convert this operation to a shift.  We
5082     can actually do this more generally, but it doesn't seem worth it.  */
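  /* Worked example (illustrative): if A is known to be 0 or 1,
     (if_then_else (ne A 0) (const_int 4) (const_int 0)) has
     i == exact_log2 (4) == 2 and becomes (ashift A 2).  */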
5083
5084  if (true_code == NE && XEXP (cond, 1) == const0_rtx
5085      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
5086      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
5087	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
5088	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5089	       == GET_MODE_BITSIZE (mode))
5090	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
5091    return
5092      simplify_shift_const (NULL_RTX, ASHIFT, mode,
5093			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
5094
5095  return x;
5096}
5097
5098/* Simplify X, a SET expression.  Return the new expression.  */
5099
5100static rtx
5101simplify_set (x)
5102     rtx x;
5103{
5104  rtx src = SET_SRC (x);
5105  rtx dest = SET_DEST (x);
5106  enum machine_mode mode
5107    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5108  rtx other_insn;
5109  rtx *cc_use;
5110
5111  /* (set (pc) (return)) gets written as (return).  */
5112  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5113    return src;
5114
5115  /* Now that we know for sure which bits of SRC we are using, see if we can
5116     simplify the expression for the object knowing that we only need the
5117     low-order bits.  */
5118
5119  if (GET_MODE_CLASS (mode) == MODE_INT
5120      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5121    {
5122      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
5123      SUBST (SET_SRC (x), src);
5124    }
5125
5126  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5127     the comparison result and try to simplify it unless we already have used
5128     undobuf.other_insn.  */
5129  if ((GET_MODE_CLASS (mode) == MODE_CC
5130       || GET_CODE (src) == COMPARE
5131       || CC0_P (dest))
5132      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5133      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
5134      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
5135      && rtx_equal_p (XEXP (*cc_use, 0), dest))
5136    {
5137      enum rtx_code old_code = GET_CODE (*cc_use);
5138      enum rtx_code new_code;
5139      rtx op0, op1, tmp;
5140      int other_changed = 0;
5141      enum machine_mode compare_mode = GET_MODE (dest);
5142      enum machine_mode tmp_mode;
5143
5144      if (GET_CODE (src) == COMPARE)
5145	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5146      else
5147	op0 = src, op1 = const0_rtx;
5148
5149      /* Check whether the comparison is known at compile time.  */
5150      if (GET_MODE (op0) != VOIDmode)
5151	tmp_mode = GET_MODE (op0);
5152      else if (GET_MODE (op1) != VOIDmode)
5153	tmp_mode = GET_MODE (op1);
5154      else
5155	tmp_mode = compare_mode;
5156      tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1);
5157      if (tmp != NULL_RTX)
5158	{
5159	  rtx pat = PATTERN (other_insn);
5160	  undobuf.other_insn = other_insn;
5161	  SUBST (*cc_use, tmp);
5162
5163	  /* Attempt to simplify CC user.  */
5164	  if (GET_CODE (pat) == SET)
5165	    {
5166	      rtx new = simplify_rtx (SET_SRC (pat));
5167	      if (new != NULL_RTX)
5168		SUBST (SET_SRC (pat), new);
5169	    }
5170
5171	  /* Convert X into a no-op move.  */
5172	  SUBST (SET_DEST (x), pc_rtx);
5173	  SUBST (SET_SRC (x), pc_rtx);
5174	  return x;
5175	}
5176
5177      /* Simplify our comparison, if possible.  */
5178      new_code = simplify_comparison (old_code, &op0, &op1);
5179
5180#ifdef EXTRA_CC_MODES
5181      /* If this machine has CC modes other than CCmode, check to see if we
5182	 need to use a different CC mode here.  */
5183      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
5184#endif /* EXTRA_CC_MODES */
5185
5186#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
5187      /* If the mode changed, we have to change SET_DEST, the mode in the
5188	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5189	 a hard register, just build new versions with the proper mode.  If it
5190	 is a pseudo, we lose unless it is the only time we set the pseudo, in
5191	 which case we can safely change its mode.  */
5192      if (compare_mode != GET_MODE (dest))
5193	{
5194	  unsigned int regno = REGNO (dest);
5195	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
5196
5197	  if (regno < FIRST_PSEUDO_REGISTER
5198	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
5199	    {
5200	      if (regno >= FIRST_PSEUDO_REGISTER)
5201		SUBST (regno_reg_rtx[regno], new_dest);
5202
5203	      SUBST (SET_DEST (x), new_dest);
5204	      SUBST (XEXP (*cc_use, 0), new_dest);
5205	      other_changed = 1;
5206
5207	      dest = new_dest;
5208	    }
5209	}
5210#endif
5211
5212      /* If the code changed, we have to build a new comparison in
5213	 undobuf.other_insn.  */
5214      if (new_code != old_code)
5215	{
5216	  unsigned HOST_WIDE_INT mask;
5217
5218	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5219					  dest, const0_rtx));
5220
5221	  /* If the only change we made was to change an EQ into an NE or
5222	     vice versa, OP0 has only one bit that might be nonzero, and OP1
5223	     is zero, check if changing the user of the condition code will
5224	     produce a valid insn.  If it won't, we can keep the original code
5225	     in that insn by surrounding our operation with an XOR.  */
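	  /* Worked example (illustrative): if X can only be 0 or 1, then
	     (eq X 0) and (ne (xor X 1) 0) agree on both values, so when
	     the user of the condition code cannot accept the new code we
	     keep the old one and XOR the operand with the mask instead.  */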
5226
5227	  if (((old_code == NE && new_code == EQ)
5228	       || (old_code == EQ && new_code == NE))
5229	      && ! other_changed && op1 == const0_rtx
5230	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5231	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
5232	    {
5233	      rtx pat = PATTERN (other_insn), note = 0;
5234
5235	      if ((recog_for_combine (&pat, other_insn, &note) < 0
5236		   && ! check_asm_operands (pat)))
5237		{
5238		  PUT_CODE (*cc_use, old_code);
5239		  other_insn = 0;
5240
5241		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
5242		}
5243	    }
5244
5245	  other_changed = 1;
5246	}
5247
5248      if (other_changed)
5249	undobuf.other_insn = other_insn;
5250
5251#ifdef HAVE_cc0
5252      /* If we are now comparing against zero, change our source if
5253	 needed.  If we do not use cc0, we always have a COMPARE.  */
5254      if (op1 == const0_rtx && dest == cc0_rtx)
5255	{
5256	  SUBST (SET_SRC (x), op0);
5257	  src = op0;
5258	}
5259      else
5260#endif
5261
5262      /* Otherwise, if we didn't previously have a COMPARE in the
5263	 correct mode, we need one.  */
5264      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
5265	{
5266	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
5267	  src = SET_SRC (x);
5268	}
5269      else
5270	{
5271	  /* Otherwise, update the COMPARE if needed.  */
5272	  SUBST (XEXP (src, 0), op0);
5273	  SUBST (XEXP (src, 1), op1);
5274	}
5275    }
5276  else
5277    {
5278      /* Get SET_SRC in a form where we have placed back any
5279	 compound expressions.  Then do the checks below.  */
5280      src = make_compound_operation (src, SET);
5281      SUBST (SET_SRC (x), src);
5282    }
5283
5284  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
5285     and X being a REG or (subreg (reg)), we may be able to convert this to
5286     (set (subreg:m2 x) (op)).
5287
5288     We can always do this if M1 is narrower than M2 because that means that
5289     we only care about the low bits of the result.
5290
5291     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
5292     perform a narrower operation than requested since the high-order bits will
5293     be undefined.  On machines where it is defined, this transformation is safe
5294     as long as M1 and M2 have the same number of words.  */
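  /* Worked example (illustrative): (set R:QI (subreg:QI (plus:SI A B) 0))
     can become (set (subreg:SI R 0) (plus:SI A B)); only the low eight
     bits of the result are needed, so doing the add in SImode is safe.  */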
5295
5296  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5297      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
5298      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
5299	   / UNITS_PER_WORD)
5300	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5301	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5302#ifndef WORD_REGISTER_OPERATIONS
5303      && (GET_MODE_SIZE (GET_MODE (src))
5304	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5305#endif
5306#ifdef CANNOT_CHANGE_MODE_CLASS
5307      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
5308	    && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
5309					 GET_MODE (SUBREG_REG (src)),
5310					 GET_MODE (src)))
5311#endif
5312      && (GET_CODE (dest) == REG
5313	  || (GET_CODE (dest) == SUBREG
5314	      && GET_CODE (SUBREG_REG (dest)) == REG)))
5315    {
5316      SUBST (SET_DEST (x),
5317	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
5318				      dest));
5319      SUBST (SET_SRC (x), SUBREG_REG (src));
5320
5321      src = SET_SRC (x), dest = SET_DEST (x);
5322    }
5323
5324#ifdef HAVE_cc0
5325  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
5326     in SRC.  */
5327  if (dest == cc0_rtx
5328      && GET_CODE (src) == SUBREG
5329      && subreg_lowpart_p (src)
5330      && (GET_MODE_BITSIZE (GET_MODE (src))
5331	  < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
5332    {
5333      rtx inner = SUBREG_REG (src);
5334      enum machine_mode inner_mode = GET_MODE (inner);
5335
5336      /* Here we make sure that we don't have a sign bit on.  */
5337      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
5338	  && (nonzero_bits (inner, inner_mode)
5339	      < ((unsigned HOST_WIDE_INT) 1
5340		 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
5341	{
5342	  SUBST (SET_SRC (x), inner);
5343	  src = SET_SRC (x);
5344	}
5345    }
5346#endif
5347
5348#ifdef LOAD_EXTEND_OP
5349  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
5350     would require a paradoxical subreg.  Replace the subreg with a
5351     zero_extend to avoid the reload that would otherwise be required.  */
5352
5353  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
5354      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
5355      && SUBREG_BYTE (src) == 0
5356      && (GET_MODE_SIZE (GET_MODE (src))
5357	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5358      && GET_CODE (SUBREG_REG (src)) == MEM)
5359    {
5360      SUBST (SET_SRC (x),
5361	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
5362		      GET_MODE (src), SUBREG_REG (src)));
5363
5364      src = SET_SRC (x);
5365    }
5366#endif
5367
5368  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
5369     are comparing an item known to be 0 or -1 against 0, use a logical
5370     operation instead. Check for one of the arms being an IOR of the other
5371     arm with some value.  We compute three terms to be IOR'ed together.  In
5372     practice, at most two will be nonzero.  Then we do the IOR's.  */
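  /* Worked example (illustrative): if A is known to be 0 or -1,
     (if_then_else (ne A 0) B C) equals (ior (and A B) (and (not A) C)):
     A == -1 selects B and A == 0 selects C.  TERM1 handles the case
     where one arm is an IOR involving the other.  */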
5373
5374  if (GET_CODE (dest) != PC
5375      && GET_CODE (src) == IF_THEN_ELSE
5376      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
5377      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
5378      && XEXP (XEXP (src, 0), 1) == const0_rtx
5379      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
5380#ifdef HAVE_conditional_move
5381      && ! can_conditionally_move_p (GET_MODE (src))
5382#endif
5383      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
5384			       GET_MODE (XEXP (XEXP (src, 0), 0)))
5385	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
5386      && ! side_effects_p (src))
5387    {
5388      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
5389		      ? XEXP (src, 1) : XEXP (src, 2));
5390      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
5391		   ? XEXP (src, 2) : XEXP (src, 1));
5392      rtx term1 = const0_rtx, term2, term3;
5393
5394      if (GET_CODE (true_rtx) == IOR
5395	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
5396	term1 = false_rtx, true_rtx = XEXP(true_rtx, 1), false_rtx = const0_rtx;
5397      else if (GET_CODE (true_rtx) == IOR
5398	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
5399	term1 = false_rtx, true_rtx = XEXP(true_rtx, 0), false_rtx = const0_rtx;
5400      else if (GET_CODE (false_rtx) == IOR
5401	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
5402	term1 = true_rtx, false_rtx = XEXP(false_rtx, 1), true_rtx = const0_rtx;
5403      else if (GET_CODE (false_rtx) == IOR
5404	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
5405	term1 = true_rtx, false_rtx = XEXP(false_rtx, 0), true_rtx = const0_rtx;
5406
5407      term2 = gen_binary (AND, GET_MODE (src),
5408			  XEXP (XEXP (src, 0), 0), true_rtx);
5409      term3 = gen_binary (AND, GET_MODE (src),
5410			  simplify_gen_unary (NOT, GET_MODE (src),
5411					      XEXP (XEXP (src, 0), 0),
5412					      GET_MODE (src)),
5413			  false_rtx);
5414
5415      SUBST (SET_SRC (x),
5416	     gen_binary (IOR, GET_MODE (src),
5417			 gen_binary (IOR, GET_MODE (src), term1, term2),
5418			 term3));
5419
5420      src = SET_SRC (x);
5421    }
5422
5423  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
5424     whole thing fail.  */
5425  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
5426    return src;
5427  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
5428    return dest;
5429  else
5430    /* Convert this into a field assignment operation, if possible.  */
5431    return make_field_assignment (x);
5432}
5433
5434/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5435   result.  LAST is nonzero if this is the last retry.  */
5436
5437static rtx
5438simplify_logical (x, last)
5439     rtx x;
5440     int last;
5441{
5442  enum machine_mode mode = GET_MODE (x);
5443  rtx op0 = XEXP (x, 0);
5444  rtx op1 = XEXP (x, 1);
5445  rtx reversed;
5446
5447  switch (GET_CODE (x))
5448    {
5449    case AND:
5450      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
5451	 insn (and may simplify more).  */
5452      if (GET_CODE (op0) == XOR
5453	  && rtx_equal_p (XEXP (op0, 0), op1)
5454	  && ! side_effects_p (op1))
5455	x = gen_binary (AND, mode,
5456			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5457			op1);
5458
5459      if (GET_CODE (op0) == XOR
5460	  && rtx_equal_p (XEXP (op0, 1), op1)
5461	  && ! side_effects_p (op1))
5462	x = gen_binary (AND, mode,
5463			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5464			op1);
5465
5466      /* Similarly for (~(A ^ B)) & A.  */
5467      if (GET_CODE (op0) == NOT
5468	  && GET_CODE (XEXP (op0, 0)) == XOR
5469	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
5470	  && ! side_effects_p (op1))
5471	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
5472
5473      if (GET_CODE (op0) == NOT
5474	  && GET_CODE (XEXP (op0, 0)) == XOR
5475	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
5476	  && ! side_effects_p (op1))
5477	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
5478
5479      /* We can call simplify_and_const_int only if we don't lose
5480	 any (sign) bits when converting INTVAL (op1) to
5481	 "unsigned HOST_WIDE_INT".  */
5482      if (GET_CODE (op1) == CONST_INT
5483	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5484	      || INTVAL (op1) > 0))
5485	{
5486	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
5487
5488	  /* If we have (ior (and X C1) C2) and the next restart would be
5489	     the last, simplify this by making C1 as small as possible
5490	     and then exit.  */
5491	  if (last
5492	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
5493	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
5494	      && GET_CODE (op1) == CONST_INT)
5495	    return gen_binary (IOR, mode,
5496			       gen_binary (AND, mode, XEXP (op0, 0),
5497					   GEN_INT (INTVAL (XEXP (op0, 1))
5498						    & ~INTVAL (op1))), op1);
5499
5500	  if (GET_CODE (x) != AND)
5501	    return x;
5502
5503	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
5504	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
5505	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5506	}
5507
5508      /* Convert (A | B) & A to A.  */
5509      if (GET_CODE (op0) == IOR
5510	  && (rtx_equal_p (XEXP (op0, 0), op1)
5511	      || rtx_equal_p (XEXP (op0, 1), op1))
5512	  && ! side_effects_p (XEXP (op0, 0))
5513	  && ! side_effects_p (XEXP (op0, 1)))
5514	return op1;
5515
5516      /* In the following group of tests (and those in case IOR below),
5517	 we start with some combination of logical operations and apply
5518	 the distributive law followed by the inverse distributive law.
5519	 Most of the time, this results in no change.  However, if some of
5520	 the operands are the same or inverses of each other, simplifications
5521	 will result.
5522
5523	 For example, (and (ior A B) (not B)) can occur as the result of
5524	 expanding a bit field assignment.  When we apply the distributive
5525	 law to this, we get (ior (and A (not B)) (and B (not B))),
5526	 which then simplifies to (and A (not B)).
5527
5528	 If we have (and (ior A B) C), apply the distributive law and then
5529	 the inverse distributive law to see if things simplify.  */
5530
5531      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
5532	{
5533	  x = apply_distributive_law
5534	    (gen_binary (GET_CODE (op0), mode,
5535			 gen_binary (AND, mode, XEXP (op0, 0), op1),
5536			 gen_binary (AND, mode, XEXP (op0, 1),
5537				     copy_rtx (op1))));
5538	  if (GET_CODE (x) != AND)
5539	    return x;
5540	}
5541
5542      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
5543	return apply_distributive_law
5544	  (gen_binary (GET_CODE (op1), mode,
5545		       gen_binary (AND, mode, XEXP (op1, 0), op0),
5546		       gen_binary (AND, mode, XEXP (op1, 1),
5547				   copy_rtx (op0))));
5548
5549      /* Similarly, taking advantage of the fact that
5550	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
5551
5552      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
5553	return apply_distributive_law
5554	  (gen_binary (XOR, mode,
5555		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
5556		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
5557				   XEXP (op1, 1))));
5558
5559      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
5560	return apply_distributive_law
5561	  (gen_binary (XOR, mode,
5562		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
5563		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1))));
5564      break;
5565
5566    case IOR:
5567      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
5568      if (GET_CODE (op1) == CONST_INT
5569	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5570	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
5571	return op1;
5572
5573      /* Convert (A & B) | A to A.  */
5574      if (GET_CODE (op0) == AND
5575	  && (rtx_equal_p (XEXP (op0, 0), op1)
5576	      || rtx_equal_p (XEXP (op0, 1), op1))
5577	  && ! side_effects_p (XEXP (op0, 0))
5578	  && ! side_effects_p (XEXP (op0, 1)))
5579	return op1;
5580
5581      /* If we have (ior (and A B) C), apply the distributive law and then
5582	 the inverse distributive law to see if things simplify.  */
5583
5584      if (GET_CODE (op0) == AND)
5585	{
5586	  x = apply_distributive_law
5587	    (gen_binary (AND, mode,
5588			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5589			 gen_binary (IOR, mode, XEXP (op0, 1),
5590				     copy_rtx (op1))));
5591
5592	  if (GET_CODE (x) != IOR)
5593	    return x;
5594	}
5595
5596      if (GET_CODE (op1) == AND)
5597	{
5598	  x = apply_distributive_law
5599	    (gen_binary (AND, mode,
5600			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5601			 gen_binary (IOR, mode, XEXP (op1, 1),
5602				     copy_rtx (op0))));
5603
5604	  if (GET_CODE (x) != IOR)
5605	    return x;
5606	}
5607
5608      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5609	 mode size to (rotate A CX).  */
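      /* Worked example (illustrative, assuming a 32-bit mode):
	 (ior (ashift X 8) (lshiftrt X 24)) has CX + CY == 32 and becomes
	 (rotate X 8).  */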
5610
5611      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5612	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5613	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5614	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5615	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5616	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5617	      == GET_MODE_BITSIZE (mode)))
5618	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5619			       (GET_CODE (op0) == ASHIFT
5620				? XEXP (op0, 1) : XEXP (op1, 1)));
5621
5622      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5623	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
5624	 does not affect any of the bits in OP1, it can really be done
5625	 as a PLUS and we can associate.  We do this by seeing if OP1
5626	 can be safely shifted left C bits.  */
5627      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5628	  && GET_CODE (XEXP (op0, 0)) == PLUS
5629	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5630	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5631	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5632	{
5633	  int count = INTVAL (XEXP (op0, 1));
5634	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5635
5636	  if (mask >> count == INTVAL (op1)
5637	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5638	    {
5639	      SUBST (XEXP (XEXP (op0, 0), 1),
5640		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5641	      return op0;
5642	    }
5643	}
5644      break;
5645
5646    case XOR:
5647      /* If we are XORing two things that have no bits in common,
5648	 convert them into an IOR.  This helps to detect rotation encoded
5649	 using those methods and possibly other simplifications.  */
5650
5651      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5652	  && (nonzero_bits (op0, mode)
5653	      & nonzero_bits (op1, mode)) == 0)
5654	return (gen_binary (IOR, mode, op0, op1));
5655
5656      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5657	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5658	 (NOT y).  */
5659      {
5660	int num_negated = 0;
5661
5662	if (GET_CODE (op0) == NOT)
5663	  num_negated++, op0 = XEXP (op0, 0);
5664	if (GET_CODE (op1) == NOT)
5665	  num_negated++, op1 = XEXP (op1, 0);
5666
5667	if (num_negated == 2)
5668	  {
5669	    SUBST (XEXP (x, 0), op0);
5670	    SUBST (XEXP (x, 1), op1);
5671	  }
5672	else if (num_negated == 1)
5673	  return
5674	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
5675				mode);
5676      }
5677
5678      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5679	 correspond to a machine insn or result in further simplifications
5680	 if B is a constant.  */
5681
5682      if (GET_CODE (op0) == AND
5683	  && rtx_equal_p (XEXP (op0, 1), op1)
5684	  && ! side_effects_p (op1))
5685	return gen_binary (AND, mode,
5686			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
5687			   op1);
5688
5689      else if (GET_CODE (op0) == AND
5690	       && rtx_equal_p (XEXP (op0, 0), op1)
5691	       && ! side_effects_p (op1))
5692	return gen_binary (AND, mode,
5693			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
5694			   op1);
5695
5696      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5697	 comparison if STORE_FLAG_VALUE is 1.  */
5698      if (STORE_FLAG_VALUE == 1
5699	  && op1 == const1_rtx
5700	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5701	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5702					      XEXP (op0, 1))))
5703	return reversed;
5704
5705      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5706	 is (lt foo (const_int 0)), so we can perform the above
5707	 simplification if STORE_FLAG_VALUE is 1.  */
5708
5709      if (STORE_FLAG_VALUE == 1
5710	  && op1 == const1_rtx
5711	  && GET_CODE (op0) == LSHIFTRT
5712	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5713	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5714	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
5715
5716      /* (xor (comparison foo bar) (const_int sign-bit))
5717	 when STORE_FLAG_VALUE is the sign bit.  */
5718      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5719	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5720	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5721	  && op1 == const_true_rtx
5722	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5723	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
5724					      XEXP (op0, 1))))
5725	return reversed;
5726
5727      break;
5728
5729    default:
5730      abort ();
5731    }
5732
5733  return x;
5734}
5735
5736/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5737   operations" because they can be replaced with two more basic operations.
5738   ZERO_EXTEND is also considered "compound" because it can be replaced with
5739   an AND operation, which is simpler, though only one operation.
5740
5741   The function expand_compound_operation is called with an rtx expression
5742   and will convert it to the appropriate shifts and AND operations,
5743   simplifying at each stage.
5744
5745   The function make_compound_operation is called to convert an expression
5746   consisting of shifts and ANDs into the equivalent compound expression.
5747   It is the inverse of this function, loosely speaking.  */
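
/* Worked example (illustrative): expand_compound_operation turns
   (zero_extend:SI X) for a QImode X into an AND of the low part with
   (const_int 255), while make_compound_operation maps such shift-and-AND
   forms back into the compound operations.  */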
5748
5749static rtx
5750expand_compound_operation (x)
5751     rtx x;
5752{
5753  unsigned HOST_WIDE_INT pos = 0, len;
5754  int unsignedp = 0;
5755  unsigned int modewidth;
5756  rtx tem;
5757
5758  switch (GET_CODE (x))
5759    {
5760    case ZERO_EXTEND:
5761      unsignedp = 1;
5762    case SIGN_EXTEND:
5763      /* We can't necessarily use a const_int for a multiword mode;
5764	 it depends on implicitly extending the value.
5765	 Since we don't know the right way to extend it,
5766	 we can't tell whether the implicit way is right.
5767
5768	 Even for a mode that is no wider than a const_int,
5769	 we can't win, because we need to sign extend one of its bits through
5770	 the rest of it, and we don't know which bit.  */
5771      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5772	return x;
5773
5774      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5775	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5776	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5777	 reloaded.  If not for that, MEMs would very rarely be safe.
5778
5779	 Reject MODEs bigger than a word, because we might not be able
5780	 to reference a two-register group starting with an arbitrary register
5781	 (and currently gen_lowpart might crash for a SUBREG).  */
5782
5783      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5784	return x;
5785
5786      /* Reject MODEs that aren't scalar integers because turning vector
5787	 or complex modes into shifts causes problems.  */
5788
5789      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5790	return x;
5791
5792      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5793      /* If the inner object has VOIDmode (the only way this can happen
5794	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5795	 know how much masking to do.  */
5796      if (len == 0)
5797	return x;
5798
5799      break;
5800
5801    case ZERO_EXTRACT:
5802      unsignedp = 1;
5803    case SIGN_EXTRACT:
5804      /* If the operand is a CLOBBER, just return it.  */
5805      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5806	return XEXP (x, 0);
5807
5808      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5809	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5810	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5811	return x;
5812
5813      /* Reject MODEs that aren't scalar integers because turning vector
5814	 or complex modes into shifts causes problems.  */
5815
5816      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
5817	return x;
5818
5819      len = INTVAL (XEXP (x, 1));
5820      pos = INTVAL (XEXP (x, 2));
5821
5822      /* If this goes outside the object being extracted, replace the object
5823	 with a (use (mem ...)) construct that only combine understands
5824	 and is used only for this purpose.  */
5825      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5826	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5827
5828      if (BITS_BIG_ENDIAN)
5829	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5830
5831      break;
5832
5833    default:
5834      return x;
5835    }
5836  /* Convert sign extension to zero extension, if we know that the high
5837     bit is not set, as this is easier to optimize.  It will be converted
5838     back to the cheaper alternative in make_extraction.  */
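  /* For example, (sign_extend:SI (and:QI X (const_int 0x7f))) is rewritten
     here as (zero_extend:SI (and:QI X (const_int 0x7f))), since bit 7 of
     the AND is known to be clear.  */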
5839  if (GET_CODE (x) == SIGN_EXTEND
5840      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5841	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5842		& ~(((unsigned HOST_WIDE_INT)
5843		      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5844		     >> 1))
5845	       == 0)))
5846    {
5847      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
5848      return expand_compound_operation (temp);
5849    }
5850
5851  /* We can optimize some special cases of ZERO_EXTEND.  */
5852  if (GET_CODE (x) == ZERO_EXTEND)
5853    {
5854      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5855         know that foo didn't have any inappropriate bits
5856         set.  */
5857      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5858	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5859	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5860	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5861	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5862	return XEXP (XEXP (x, 0), 0);
5863
5864      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5865      if (GET_CODE (XEXP (x, 0)) == SUBREG
5866	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5867	  && subreg_lowpart_p (XEXP (x, 0))
5868	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5869	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5870	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5871	return SUBREG_REG (XEXP (x, 0));
5872
5873      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5874         is a comparison and STORE_FLAG_VALUE permits.  This is like
5875         the first case, but it works even when GET_MODE (x) is larger
5876         than HOST_WIDE_INT.  */
5877      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5878	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5879	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5880	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5881	      <= HOST_BITS_PER_WIDE_INT)
5882	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5883	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5884	return XEXP (XEXP (x, 0), 0);
5885
5886      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5887      if (GET_CODE (XEXP (x, 0)) == SUBREG
5888	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5889	  && subreg_lowpart_p (XEXP (x, 0))
5890	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5891	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5892	      <= HOST_BITS_PER_WIDE_INT)
5893	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5894	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5895	return SUBREG_REG (XEXP (x, 0));
5896
5897    }
5898
5899  /* If we reach here, we want to return a pair of shifts.  The inner
5900     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5901     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5902     logical depending on the value of UNSIGNEDP.
5903
5904     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5905     converted into an AND of a shift.
5906
5907     We must check for the case where the left shift would have a negative
5908     count.  This can happen in a case like (x >> 31) & 255 on machines
5909     that can't shift by a constant.  On those machines, we would first
5910     combine the shift with the AND to produce a variable-position
5911     extraction.  Then the constant of 31 would be substituted in to produce
5912     such a position.  */
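
  /* For example, assuming 32-bit SImode and BITS_BIG_ENDIAN == 0,
     (sign_extract:SI X (const_int 8) (const_int 3)) has LEN == 8 and
     POS == 3 and becomes the pair of shifts
     (ashiftrt:SI (ashift:SI X (const_int 21)) (const_int 24)).  */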
5913
5914  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5915  if (modewidth + len >= pos)
5916    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5917				GET_MODE (x),
5918				simplify_shift_const (NULL_RTX, ASHIFT,
5919						      GET_MODE (x),
5920						      XEXP (x, 0),
5921						      modewidth - pos - len),
5922				modewidth - len);
5923
5924  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5925    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5926				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5927							GET_MODE (x),
5928							XEXP (x, 0), pos),
5929				  ((HOST_WIDE_INT) 1 << len) - 1);
5930  else
5931    /* Any other cases we can't handle.  */
5932    return x;
5933
5934  /* If we couldn't do this for some reason, return the original
5935     expression.  */
5936  if (GET_CODE (tem) == CLOBBER)
5937    return x;
5938
5939  return tem;
5940}
5941
5942/* X is a SET which contains an assignment of one object into
5943   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5944   or certain SUBREGS). If possible, convert it into a series of
5945   logical operations.
5946
5947   We half-heartedly support variable positions, but do not at all
5948   support variable lengths.  */
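
/* For example, assuming BITS_BIG_ENDIAN == 0 and 32-bit SImode, storing Y
   into the 8-bit field at bit 4 of register X,

       (set (zero_extract:SI (reg:SI X) (const_int 8) (const_int 4)) Y)

   is rewritten along the lines of

       (set (reg:SI X)
	    (ior:SI (and:SI (reg:SI X) (const_int -4081))
		    (ashift:SI (and:SI Y (const_int 255)) (const_int 4))))

   where -4081 is ~(255 << 4): the IOR keeps the bits outside the field
   and inserts the masked source.  */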
5949
5950static rtx
5951expand_field_assignment (x)
5952     rtx x;
5953{
5954  rtx inner;
5955  rtx pos;			/* Always counts from low bit.  */
5956  int len;
5957  rtx mask;
5958  enum machine_mode compute_mode;
5959
5960  /* Loop until we find something we can't simplify.  */
5961  while (1)
5962    {
5963      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5964	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5965	{
5966	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5967	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5968	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
5969	}
5970      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5971	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5972	{
5973	  inner = XEXP (SET_DEST (x), 0);
5974	  len = INTVAL (XEXP (SET_DEST (x), 1));
5975	  pos = XEXP (SET_DEST (x), 2);
5976
5977	  /* If the position is constant and the field extends beyond the
5978	     width of INNER, surround INNER with a USE to indicate this.  */
5979	  if (GET_CODE (pos) == CONST_INT
5980	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5981	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5982
5983	  if (BITS_BIG_ENDIAN)
5984	    {
5985	      if (GET_CODE (pos) == CONST_INT)
5986		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5987			       - INTVAL (pos));
5988	      else if (GET_CODE (pos) == MINUS
5989		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5990		       && (INTVAL (XEXP (pos, 1))
5991			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5992		/* If position is ADJUST - X, new position is X.  */
5993		pos = XEXP (pos, 0);
5994	      else
5995		pos = gen_binary (MINUS, GET_MODE (pos),
5996				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5997					   - len),
5998				  pos);
5999	    }
6000	}
6001
6002      /* A SUBREG between two modes that occupy the same number of words
6003	 can be done by moving the SUBREG to the source.  */
6004      else if (GET_CODE (SET_DEST (x)) == SUBREG
6005	       /* We need SUBREGs to compute nonzero_bits properly.  */
6006	       && nonzero_sign_valid
6007	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6008		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6009		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6010			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6011	{
6012	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
6013			   gen_lowpart_for_combine
6014			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
6015			    SET_SRC (x)));
6016	  continue;
6017	}
6018      else
6019	break;
6020
6021      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6022	inner = SUBREG_REG (inner);
6023
6024      compute_mode = GET_MODE (inner);
6025
6026      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
6027      if (! SCALAR_INT_MODE_P (compute_mode))
6028	{
6029	  enum machine_mode imode;
6030
6031	  /* Don't do anything for vector or complex integral types.  */
6032	  if (! FLOAT_MODE_P (compute_mode))
6033	    break;
6034
6035	  /* Try to find an integral mode to pun with.  */
6036	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6037	  if (imode == BLKmode)
6038	    break;
6039
6040	  compute_mode = imode;
6041	  inner = gen_lowpart_for_combine (imode, inner);
6042	}
6043
6044      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
6045      if (len < HOST_BITS_PER_WIDE_INT)
6046	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6047      else
6048	break;
6049
6050      /* Now compute the equivalent expression.  Make a copy of INNER
6051	 for the SET_DEST in case it is a MEM into which we will substitute;
6052	 we don't want shared RTL in that case.  */
6053      x = gen_rtx_SET
6054	(VOIDmode, copy_rtx (inner),
6055	 gen_binary (IOR, compute_mode,
6056		     gen_binary (AND, compute_mode,
6057				 simplify_gen_unary (NOT, compute_mode,
6058						     gen_binary (ASHIFT,
6059								 compute_mode,
6060								 mask, pos),
6061						     compute_mode),
6062				 inner),
6063		     gen_binary (ASHIFT, compute_mode,
6064				 gen_binary (AND, compute_mode,
6065					     gen_lowpart_for_combine
6066					     (compute_mode, SET_SRC (x)),
6067					     mask),
6068				 pos)));
6069    }
6070
6071  return x;
6072}
6073
6074/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
6075   it is an RTX that represents a variable starting position; otherwise,
6076   POS is the (constant) starting bit position (counted from the LSB).
6077
6078   INNER may be a USE.  This will occur when we started with a bitfield
6079   that went outside the boundary of the object in memory, which is
6080   allowed on most machines.  To isolate this case, we produce a USE
6081   whose mode is wide enough and surround the MEM with it.  The only
6082   code that understands the USE is this routine.  If it is not removed,
6083   it will cause the resulting insn not to match.
6084
6085   UNSIGNEDP is nonzero for an unsigned reference and zero for a
6086   signed reference.
6087
6088   IN_DEST is nonzero if this is a reference in the destination of a
6089   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
6090   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6091   be used.
6092
6093   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
6094   ZERO_EXTRACT should be built even for bits starting at bit 0.
6095
6096   MODE is the desired mode of the result (if IN_DEST == 0).
6097
6098   The result is an RTX for the extraction or NULL_RTX if the target
6099   can't handle it.  */
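
/* For example, on a little-endian byte-addressed target,

       make_extraction (SImode, (mem:SI A), 8, NULL_RTX, 8, 1, 0, 0)

   needs no extraction rtx at all: assuming the alignment tests below
   pass, it yields (zero_extend:SI (mem:QI A')), where A' addresses the
   byte at offset 1 from A.  */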
6100
6101static rtx
6102make_extraction (mode, inner, pos, pos_rtx, len,
6103		 unsignedp, in_dest, in_compare)
6104     enum machine_mode mode;
6105     rtx inner;
6106     HOST_WIDE_INT pos;
6107     rtx pos_rtx;
6108     unsigned HOST_WIDE_INT len;
6109     int unsignedp;
6110     int in_dest, in_compare;
6111{
6112  /* This mode describes the size of the storage area
6113     to fetch the overall value from.  Within that, we
6114     ignore the POS lowest bits, etc.  */
6115  enum machine_mode is_mode = GET_MODE (inner);
6116  enum machine_mode inner_mode;
6117  enum machine_mode wanted_inner_mode = byte_mode;
6118  enum machine_mode wanted_inner_reg_mode = word_mode;
6119  enum machine_mode pos_mode = word_mode;
6120  enum machine_mode extraction_mode = word_mode;
6121  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
6122  int spans_byte = 0;
6123  rtx new = 0;
6124  rtx orig_pos_rtx = pos_rtx;
6125  HOST_WIDE_INT orig_pos;
6126
6127  /* Get some information about INNER and get the innermost object.  */
6128  if (GET_CODE (inner) == USE)
6129    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
6130    /* We don't need to adjust the position because we set up the USE
6131       to pretend that it was a full-word object.  */
6132    spans_byte = 1, inner = XEXP (inner, 0);
6133  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6134    {
6135      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6136	 consider just the QI as the memory to extract from.
6137	 The subreg adds or removes high bits; its mode is
6138	 irrelevant to the meaning of this extraction,
6139	 since POS and LEN count from the lsb.  */
6140      if (GET_CODE (SUBREG_REG (inner)) == MEM)
6141	is_mode = GET_MODE (SUBREG_REG (inner));
6142      inner = SUBREG_REG (inner);
6143    }
6144  else if (GET_CODE (inner) == ASHIFT
6145	   && GET_CODE (XEXP (inner, 1)) == CONST_INT
6146	   && pos_rtx == 0 && pos == 0
6147	   && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
6148    {
6149      /* We're extracting the least significant bits of an rtx
6150	 (ashift X (const_int C)), where LEN > C.  Extract the
6151	 least significant (LEN - C) bits of X, giving an rtx
6152	 whose mode is MODE, then shift it left C times.  */
6153      new = make_extraction (mode, XEXP (inner, 0),
6154			     0, 0, len - INTVAL (XEXP (inner, 1)),
6155			     unsignedp, in_dest, in_compare);
6156      if (new != 0)
6157	return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
6158    }
6159
6160  inner_mode = GET_MODE (inner);
6161
6162  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
6163    pos = INTVAL (pos_rtx), pos_rtx = 0;
6164
6165  /* See if this can be done without an extraction.  We never can if the
6166     width of the field is not the same as that of some integer mode. For
6167     registers, we can only avoid the extraction if the position is at the
6168     low-order bit and this is either not in the destination or we have the
6169     appropriate STRICT_LOW_PART operation available.
6170
6171     For MEM, we can avoid an extract if the field starts on an appropriate
6172     boundary and we can change the mode of the memory reference.  However,
6173     we cannot directly access the MEM if we have a USE and the underlying
6174     MEM is not TMODE.  This combination means that MEM was being used in a
6175     context where bits outside its mode were being referenced; that is only
6176     valid in bit-field insns.  */
6177
6178  if (tmode != BLKmode
6179      && ! (spans_byte && inner_mode != tmode)
6180      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
6181	   && GET_CODE (inner) != MEM
6182	   && (! in_dest
6183	       || (GET_CODE (inner) == REG
6184		   && have_insn_for (STRICT_LOW_PART, tmode))))
6185	  || (GET_CODE (inner) == MEM && pos_rtx == 0
6186	      && (pos
6187		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6188		     : BITS_PER_UNIT)) == 0
6189	      /* We can't do this if we are widening INNER_MODE (it
6190		 may not be aligned, for one thing).  */
6191	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6192	      && (inner_mode == tmode
6193		  || (! mode_dependent_address_p (XEXP (inner, 0))
6194		      && ! MEM_VOLATILE_P (inner))))))
6195    {
6196      /* If INNER is a MEM, make a new MEM that encompasses just the desired
6197	 field.  If the original and current mode are the same, we need not
6198	 adjust the offset.  Otherwise, we do so when bytes are big-endian.
6199
6200	 If INNER is not a MEM, get a piece consisting of just the field
6201	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
6202
6203      if (GET_CODE (inner) == MEM)
6204	{
6205	  HOST_WIDE_INT offset;
6206
6207	  /* POS counts from lsb, but make OFFSET count in memory order.  */
6208	  if (BYTES_BIG_ENDIAN)
6209	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6210	  else
6211	    offset = pos / BITS_PER_UNIT;
6212
6213	  new = adjust_address_nv (inner, tmode, offset);
6214	}
6215      else if (GET_CODE (inner) == REG)
6216	{
6217	  /* We can't call gen_lowpart_for_combine here since we always want
6218	     a SUBREG and it would sometimes return a new hard register.  */
6219	  if (tmode != inner_mode)
6220	    {
6221	      HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6222
6223	      if (WORDS_BIG_ENDIAN
6224		  && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6225		final_word = ((GET_MODE_SIZE (inner_mode)
6226			       - GET_MODE_SIZE (tmode))
6227			      / UNITS_PER_WORD) - final_word;
6228
6229	      final_word *= UNITS_PER_WORD;
6230	      if (BYTES_BIG_ENDIAN
6231		  && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6232		final_word += (GET_MODE_SIZE (inner_mode)
6233			       - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6234
6235	      /* Avoid creating invalid subregs, for example when
6236		 simplifying (x>>32)&255.  */
6237	      if (final_word >= GET_MODE_SIZE (inner_mode))
6238		return NULL_RTX;
6239
6240	      new = gen_rtx_SUBREG (tmode, inner, final_word);
6241	    }
6242	  else
6243	    new = inner;
6244	}
6245      else
6246	new = force_to_mode (inner, tmode,
6247			     len >= HOST_BITS_PER_WIDE_INT
6248			     ? ~(unsigned HOST_WIDE_INT) 0
6249			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
6250			     NULL_RTX, 0);
6251
6252      /* If this extraction is going into the destination of a SET,
6253	 make a STRICT_LOW_PART unless we made a MEM.  */
6254
6255      if (in_dest)
6256	return (GET_CODE (new) == MEM ? new
6257		: (GET_CODE (new) != SUBREG
6258		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
6259		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));
6260
6261      if (mode == tmode)
6262	return new;
6263
6264      if (GET_CODE (new) == CONST_INT)
6265	return gen_int_mode (INTVAL (new), mode);
6266
6267      /* If we know that no extraneous bits are set, and that the high
6268	 bit is not set, convert the extraction to the cheaper of
6269	 sign and zero extension, which are equivalent in these cases.  */
6270      if (flag_expensive_optimizations
6271	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6272	      && ((nonzero_bits (new, tmode)
6273		   & ~(((unsigned HOST_WIDE_INT)
6274			GET_MODE_MASK (tmode))
6275		       >> 1))
6276		  == 0)))
6277	{
6278	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6279	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6280
6281	  /* Prefer ZERO_EXTENSION, since it gives more information to
6282	     backends.  */
6283	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6284	    return temp;
6285	  return temp1;
6286	}
6287
6288      /* Otherwise, sign- or zero-extend unless we already are in the
6289	 proper mode.  */
6290
6291      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6292			     mode, new));
6293    }
6294
6295  /* Unless this is a COMPARE or we have a funny memory reference,
6296     don't do anything with zero-extending field extracts starting at
6297     the low-order bit since they are simple AND operations.  */
6298  if (pos_rtx == 0 && pos == 0 && ! in_dest
6299      && ! in_compare && ! spans_byte && unsignedp)
6300    return 0;
6301
6302  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
6303     we would be spanning bytes or if the position is not a constant and the
6304     length is not 1.  In all other cases, we would only be going outside
6305     our object in cases when an original shift would have been
6306     undefined.  */
6307  if (! spans_byte && GET_CODE (inner) == MEM
6308      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6309	  || (pos_rtx != 0 && len != 1)))
6310    return 0;
6311
6312  /* Get the mode to use should INNER not be a MEM, the mode for the position,
6313     and the mode for the result.  */
6314  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
6315    {
6316      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6317      pos_mode = mode_for_extraction (EP_insv, 2);
6318      extraction_mode = mode_for_extraction (EP_insv, 3);
6319    }
6320
6321  if (! in_dest && unsignedp
6322      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
6323    {
6324      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6325      pos_mode = mode_for_extraction (EP_extzv, 3);
6326      extraction_mode = mode_for_extraction (EP_extzv, 0);
6327    }
6328
6329  if (! in_dest && ! unsignedp
6330      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
6331    {
6332      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6333      pos_mode = mode_for_extraction (EP_extv, 3);
6334      extraction_mode = mode_for_extraction (EP_extv, 0);
6335    }
6336
6337  /* Never narrow an object, since that might not be safe.  */
6338
6339  if (mode != VOIDmode
6340      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6341    extraction_mode = mode;
6342
6343  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6344      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6345    pos_mode = GET_MODE (pos_rtx);
6346
6347  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6348     if we have to change the mode of memory and cannot, the desired mode is
6349     EXTRACTION_MODE.  */
6350  if (GET_CODE (inner) != MEM)
6351    wanted_inner_mode = wanted_inner_reg_mode;
6352  else if (inner_mode != wanted_inner_mode
6353	   && (mode_dependent_address_p (XEXP (inner, 0))
6354	       || MEM_VOLATILE_P (inner)))
6355    wanted_inner_mode = extraction_mode;
6356
6357  orig_pos = pos;
6358
6359  if (BITS_BIG_ENDIAN)
6360    {
6361      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6362	 BITS_BIG_ENDIAN style.  If position is constant, compute new
6363	 position.  Otherwise, build subtraction.
6364	 Note that POS is relative to the mode of the original argument.
6365	 If it's a MEM we need to recompute POS relative to that.
6366	 However, if we're extracting from (or inserting into) a register,
6367	 we want to recompute POS relative to wanted_inner_mode.  */
6368      int width = (GET_CODE (inner) == MEM
6369		   ? GET_MODE_BITSIZE (is_mode)
6370		   : GET_MODE_BITSIZE (wanted_inner_mode));
6371
6372      if (pos_rtx == 0)
6373	pos = width - len - pos;
6374      else
6375	pos_rtx
6376	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
6377      /* POS may be less than 0 now, but we check for that below.
6378	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
6379    }
6380
6381  /* If INNER has a wider mode, make it smaller.  If this is a constant
6382     extract, try to adjust the byte to point to the byte containing
6383     the value.  */
6384  if (wanted_inner_mode != VOIDmode
6385      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
6386      && ((GET_CODE (inner) == MEM
6387	   && (inner_mode == wanted_inner_mode
6388	       || (! mode_dependent_address_p (XEXP (inner, 0))
6389		   && ! MEM_VOLATILE_P (inner))))))
6390    {
6391      int offset = 0;
6392
6393      /* The computations below will be correct if the machine is big
6394	 endian in both bits and bytes or little endian in bits and bytes.
6395	 If it is mixed, we must adjust.  */
6396
6397      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6398	 adjust OFFSET to compensate.  */
6399      if (BYTES_BIG_ENDIAN
6400	  && ! spans_byte
6401	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6402	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6403
6404      /* If this is a constant position, we can move to the desired byte.  */
6405      if (pos_rtx == 0)
6406	{
6407	  offset += pos / BITS_PER_UNIT;
6408	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
6409	}
6410
6411      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6412	  && ! spans_byte
6413	  && is_mode != wanted_inner_mode)
6414	offset = (GET_MODE_SIZE (is_mode)
6415		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6416
6417      if (offset != 0 || inner_mode != wanted_inner_mode)
6418	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6419    }
6420
6421  /* If INNER is not memory, we can always get it into the proper mode.  If we
6422     are changing its mode, POS must be a constant and smaller than the size
6423     of the new mode.  */
6424  else if (GET_CODE (inner) != MEM)
6425    {
6426      if (GET_MODE (inner) != wanted_inner_mode
6427	  && (pos_rtx != 0
6428	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6429	return 0;
6430
6431      inner = force_to_mode (inner, wanted_inner_mode,
6432			     pos_rtx
6433			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6434			     ? ~(unsigned HOST_WIDE_INT) 0
6435			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6436				<< orig_pos),
6437			     NULL_RTX, 0);
6438    }
6439
6440  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6441     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6442  if (pos_rtx != 0
6443      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6444    {
6445      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6446
6447      /* If we know that no extraneous bits are set, and that the high
6448	 bit is not set, convert the extraction to the cheaper one - either
6449	 SIGN_EXTEND or ZERO_EXTEND, which are equivalent in these
6450	 cases.  */
6451      if (flag_expensive_optimizations
6452	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6453	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6454		   & ~(((unsigned HOST_WIDE_INT)
6455			GET_MODE_MASK (GET_MODE (pos_rtx)))
6456		       >> 1))
6457		  == 0)))
6458	{
6459	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6460
6461	  /* Prefer ZERO_EXTENSION, since it gives more information to
6462	     backends.  */
6463	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6464	    temp = temp1;
6465	}
6466      pos_rtx = temp;
6467    }
6468  else if (pos_rtx != 0
6469	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6470    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
6471
6472  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6473     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6474     be a CONST_INT.  */
6475  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6476    pos_rtx = orig_pos_rtx;
6477
6478  else if (pos_rtx == 0)
6479    pos_rtx = GEN_INT (pos);
6480
6481  /* Make the required operation.  See if we can use existing rtx.  */
6482  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6483			 extraction_mode, inner, GEN_INT (len), pos_rtx);
6484  if (! in_dest)
6485    new = gen_lowpart_for_combine (mode, new);
6486
6487  return new;
6488}
6489
6490/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6491   with any other operations in X.  Return X without that shift if so.  */
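
/* For example, with COUNT == 3,
   (plus:SI (ashift:SI X (const_int 3)) (const_int 8)) yields
   (plus:SI X (const_int 1)); shifting that result left by 3 bits
   recreates the original value.  */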
6492
6493static rtx
6494extract_left_shift (x, count)
6495     rtx x;
6496     int count;
6497{
6498  enum rtx_code code = GET_CODE (x);
6499  enum machine_mode mode = GET_MODE (x);
6500  rtx tem;
6501
6502  switch (code)
6503    {
6504    case ASHIFT:
6505      /* This is the shift itself.  If it is wide enough, we will return
6506	 either the value being shifted if the shift count is equal to
6507	 COUNT or a shift for the difference.  */
6508      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6509	  && INTVAL (XEXP (x, 1)) >= count)
6510	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
6511				     INTVAL (XEXP (x, 1)) - count);
6512      break;
6513
6514    case NEG:  case NOT:
6515      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6516	return simplify_gen_unary (code, mode, tem, mode);
6517
6518      break;
6519
6520    case PLUS:  case IOR:  case XOR:  case AND:
6521      /* If we can safely shift this constant and we find the inner shift,
6522	 make a new operation.  */
6523      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6524	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
6525	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
6526	return gen_binary (code, mode, tem,
6527			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
6528
6529      break;
6530
6531    default:
6532      break;
6533    }
6534
6535  return 0;
6536}
6537
6538/* Look at the expression rooted at X.  Look for expressions
6539   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
6540   Form these expressions.
6541
6542   Return the new rtx, usually just X.
6543
6544   Also, for machines like the VAX that don't have logical shift insns,
6545   try to convert logical to arithmetic shift operations in cases where
6546   they are equivalent.  This undoes the canonicalizations to logical
6547   shifts done elsewhere.
6548
6549   We try, as much as possible, to re-use rtl expressions to save memory.
6550
6551   IN_CODE says what kind of expression we are processing.  Normally, it is
6552   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
6553   being kludges), it is MEM.  When processing the arguments of a comparison
6554   or a COMPARE against zero, it is COMPARE.  */
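
/* For example, on a 32-bit little-endian target,
   (and:SI (lshiftrt:SI X (const_int 8)) (const_int 255)) is typically
   turned into (zero_extract:SI X (const_int 8) (const_int 8)), and a
   plain (and:SI (reg:SI R) (const_int 255)) typically becomes
   (zero_extend:SI (subreg:QI (reg:SI R) 0)).  */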
6555
6556static rtx
6557make_compound_operation (x, in_code)
6558     rtx x;
6559     enum rtx_code in_code;
6560{
6561  enum rtx_code code = GET_CODE (x);
6562  enum machine_mode mode = GET_MODE (x);
6563  int mode_width = GET_MODE_BITSIZE (mode);
6564  rtx rhs, lhs;
6565  enum rtx_code next_code;
6566  int i;
6567  rtx new = 0;
6568  rtx tem;
6569  const char *fmt;
6570
6571  /* Select the code to be used in recursive calls.  Once we are inside an
6572     address, we stay there.  If we have a comparison, set to COMPARE,
6573     but once inside, go back to our default of SET.  */
6574
6575  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
6576	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
6577		  && XEXP (x, 1) == const0_rtx) ? COMPARE
6578	       : in_code == COMPARE ? SET : in_code);
6579
6580  /* Process depending on the code of this operation.  If NEW is set
6581     nonzero, it will be returned.  */
6582
6583  switch (code)
6584    {
6585    case ASHIFT:
6586      /* Convert shifts by constants into multiplications if inside
6587	 an address.  */
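      /* For instance, (mem:SI (plus:SI (ashift:SI I (const_int 2)) B)) is
	 treated as (mem:SI (plus:SI (mult:SI I (const_int 4)) B)), the
	 canonical form for a scaled-index address.  */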
6588      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
6589	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6590	  && INTVAL (XEXP (x, 1)) >= 0)
6591	{
6592	  new = make_compound_operation (XEXP (x, 0), next_code);
6593	  new = gen_rtx_MULT (mode, new,
6594			      GEN_INT ((HOST_WIDE_INT) 1
6595				       << INTVAL (XEXP (x, 1))));
6596	}
6597      break;
6598
6599    case AND:
6600      /* If the second operand is not a constant, we can't do anything
6601	 with it.  */
6602      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
6603	break;
6604
6605      /* If the constant is a power of two minus one and the first operand
6606	 is a logical right shift, make an extraction.  */
6607      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6608	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6609	{
6610	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6611	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
6612				 0, in_code == COMPARE);
6613	}
6614
6615      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
6616      else if (GET_CODE (XEXP (x, 0)) == SUBREG
6617	       && subreg_lowpart_p (XEXP (x, 0))
6618	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
6619	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6620	{
6621	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
6622					 next_code);
6623	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
6624				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
6625				 0, in_code == COMPARE);
6626	}
6627      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
6628      else if ((GET_CODE (XEXP (x, 0)) == XOR
6629		|| GET_CODE (XEXP (x, 0)) == IOR)
6630	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
6631	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
6632	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6633	{
6634	  /* Apply the distributive law, and then try to make extractions.  */
6635	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
6636				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
6637					     XEXP (x, 1)),
6638				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
6639					     XEXP (x, 1)));
6640	  new = make_compound_operation (new, in_code);
6641	}
6642
6643      /* If we have (and (rotate X C) M) and C is at least as large as the
6644	 number of bits in M, this is an extraction.  */
6645
6646      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6647	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6648	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6649	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6650	{
6651	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6652	  new = make_extraction (mode, new,
6653				 (GET_MODE_BITSIZE (mode)
6654				  - INTVAL (XEXP (XEXP (x, 0), 1))),
6655				 NULL_RTX, i, 1, 0, in_code == COMPARE);
6656	}
6657
6658      /* On machines without logical shifts, if the operand of the AND is
6659	 a logical shift and our mask turns off all the propagated sign
6660	 bits, we can replace the logical shift with an arithmetic shift.  */
6661      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6662	       && !have_insn_for (LSHIFTRT, mode)
6663	       && have_insn_for (ASHIFTRT, mode)
6664	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6665	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6666	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6667	       && mode_width <= HOST_BITS_PER_WIDE_INT)
6668	{
6669	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6670
6671	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6672	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6673	    SUBST (XEXP (x, 0),
6674		   gen_rtx_ASHIFTRT (mode,
6675				     make_compound_operation
6676				     (XEXP (XEXP (x, 0), 0), next_code),
6677				     XEXP (XEXP (x, 0), 1)));
6678	}
6679
6680      /* If the constant is one less than a power of two, this might be
6681	 representable by an extraction even if no shift is present.
6682	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6683	 we are in a COMPARE.  */
6684      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6685	new = make_extraction (mode,
6686			       make_compound_operation (XEXP (x, 0),
6687							next_code),
6688			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6689
6690      /* If we are in a comparison and this is an AND with a power of two,
6691	 convert this into the appropriate bit extract.  */
6692      else if (in_code == COMPARE
6693	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6694	new = make_extraction (mode,
6695			       make_compound_operation (XEXP (x, 0),
6696							next_code),
6697			       i, NULL_RTX, 1, 1, 0, 1);
6698
6699      break;
6700
6701    case LSHIFTRT:
6702      /* If the sign bit is known to be zero, replace this with an
6703	 arithmetic shift.  */
6704      if (have_insn_for (ASHIFTRT, mode)
6705	  && ! have_insn_for (LSHIFTRT, mode)
6706	  && mode_width <= HOST_BITS_PER_WIDE_INT
6707	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6708	{
6709	  new = gen_rtx_ASHIFTRT (mode,
6710				  make_compound_operation (XEXP (x, 0),
6711							   next_code),
6712				  XEXP (x, 1));
6713	  break;
6714	}
6715
6716      /* ... fall through ...  */
6717
6718    case ASHIFTRT:
6719      lhs = XEXP (x, 0);
6720      rhs = XEXP (x, 1);
6721
6722      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6723	 this is a SIGN_EXTRACT.  */
6724      if (GET_CODE (rhs) == CONST_INT
6725	  && GET_CODE (lhs) == ASHIFT
6726	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6727	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6728	{
6729	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6730	  new = make_extraction (mode, new,
6731				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6732				 NULL_RTX, mode_width - INTVAL (rhs),
6733				 code == LSHIFTRT, 0, in_code == COMPARE);
6734	  break;
6735	}
6736
6737      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6738	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6739	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6740	 seem worth the effort; the case checked for occurs on Alpha.  */
6741
6742      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6743	  && ! (GET_CODE (lhs) == SUBREG
6744		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6745	  && GET_CODE (rhs) == CONST_INT
6746	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6747	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6748	new = make_extraction (mode, make_compound_operation (new, next_code),
6749			       0, NULL_RTX, mode_width - INTVAL (rhs),
6750			       code == LSHIFTRT, 0, in_code == COMPARE);
6751
6752      break;
6753
6754    case SUBREG:
6755      /* Call ourselves recursively on the inner expression.  If we are
6756	 narrowing the object and it has a different RTL code than
6757	 it originally did, do this SUBREG as a force_to_mode.  */
6758
6759      tem = make_compound_operation (SUBREG_REG (x), in_code);
6760      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6761	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6762	  && subreg_lowpart_p (x))
6763	{
6764	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
6765				     NULL_RTX, 0);
6766
6767	  /* If we have something other than a SUBREG, we might have
6768	     done an expansion, so rerun ourselves.  */
6769	  if (GET_CODE (newer) != SUBREG)
6770	    newer = make_compound_operation (newer, in_code);
6771
6772	  return newer;
6773	}
6774
6775      /* If this is a paradoxical subreg, and the new code is a sign or
6776	 zero extension, omit the subreg and widen the extension.  If it
6777	 is a regular subreg, we can still get rid of the subreg by not
6778	 widening so much, or in fact removing the extension entirely.  */
6779      if ((GET_CODE (tem) == SIGN_EXTEND
6780	   || GET_CODE (tem) == ZERO_EXTEND)
6781	  && subreg_lowpart_p (x))
6782	{
6783	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6784	      || (GET_MODE_SIZE (mode) >
6785		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6786	    {
6787	      if (! SCALAR_INT_MODE_P (mode))
6788		break;
6789	      tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
6790	    }
6791	  else
6792	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6793	  return tem;
6794	}
6795      break;
6796
6797    default:
6798      break;
6799    }
6800
6801  if (new)
6802    {
6803      x = gen_lowpart_for_combine (mode, new);
6804      code = GET_CODE (x);
6805    }
6806
6807  /* Now recursively process each operand of this operation.  */
6808  fmt = GET_RTX_FORMAT (code);
6809  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6810    if (fmt[i] == 'e')
6811      {
6812	new = make_compound_operation (XEXP (x, i), next_code);
6813	SUBST (XEXP (x, i), new);
6814      }
6815
6816  return x;
6817}
6818
6819/* Given M see if it is a value that would select a field of bits
6820   within an item, but not the entire word.  Return -1 if not.
6821   Otherwise, return the starting position of the field, where 0 is the
6822   low-order bit.
6823
6824   *PLEN is set to the length of the field.  */
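
/* For example, get_pos_from_mask (0x0ff0, &len) returns 4 and sets LEN
   to 8, while get_pos_from_mask (0x0ff1, &len) returns -1 because the
   set bits are not contiguous.  */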
6825
6826static int
6827get_pos_from_mask (m, plen)
6828     unsigned HOST_WIDE_INT m;
6829     unsigned HOST_WIDE_INT *plen;
6830{
6831  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6832  int pos = exact_log2 (m & -m);
6833  int len;
6834
6835  if (pos < 0)
6836    return -1;
6837
6838  /* Now shift off the low-order zero bits and see if we have a power of
6839     two minus 1.  */
6840  len = exact_log2 ((m >> pos) + 1);
6841
6842  if (len <= 0)
6843    return -1;
6844
6845  *plen = len;
6846  return pos;
6847}
6848
6849/* See if X can be simplified knowing that we will only refer to it in
6850   MODE and will only refer to those bits that are nonzero in MASK.
6851   If other bits are being computed or if masking operations are done
6852   that select a superset of the bits in MASK, they can sometimes be
6853   ignored.
6854
6855   Return a possibly simplified expression, but always convert X to
6856   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6857
6858   Also, if REG is nonzero and X is a register equal in value to REG,
6859   replace X with REG.
6860
6861   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6862   are all off in X.  This is used when X will be complemented, by either
6863   NOT, NEG, or XOR.  */
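
/* For example, in outline,

       force_to_mode ((and:SI X (const_int 0xffff)), SImode, 0xff,
		      NULL_RTX, 0)

   returns just X: under a mask of the low eight bits, the AND with
   0xffff changes nothing and is deleted.  */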
6864
6865static rtx
6866force_to_mode (x, mode, mask, reg, just_select)
6867     rtx x;
6868     enum machine_mode mode;
6869     unsigned HOST_WIDE_INT mask;
6870     rtx reg;
6871     int just_select;
6872{
6873  enum rtx_code code = GET_CODE (x);
6874  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6875  enum machine_mode op_mode;
6876  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6877  rtx op0, op1, temp;
6878
6879  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6880     code below will do the wrong thing since the mode of such an
6881     expression is VOIDmode.
6882
6883     Also do nothing if X is a CLOBBER; this can happen if X was
6884     the return value from a call to gen_lowpart_for_combine.  */
6885  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6886    return x;
6887
6888  /* We want to perform the operation in its present mode unless we know
6889     that the operation is valid in MODE, in which case we do the operation
6890     in MODE.  */
6891  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6892	      && have_insn_for (code, mode))
6893	     ? mode : GET_MODE (x));
6894
6895  /* It is not valid to do a right-shift in a narrower mode
6896     than the one it came in with.  */
6897  if ((code == LSHIFTRT || code == ASHIFTRT)
6898      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6899    op_mode = GET_MODE (x);
6900
6901  /* Truncate MASK to fit OP_MODE.  */
6902  if (op_mode)
6903    mask &= GET_MODE_MASK (op_mode);
6904
6905  /* When we have an arithmetic operation, or a shift whose count we
6906     do not know, we need to assume that all bits up to the highest-order
6907     bit in MASK will be needed.  This is how we form such a mask.  */
6908  if (op_mode)
6909    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6910		   ? GET_MODE_MASK (op_mode)
6911		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
6912		      - 1));
6913  else
6914    fuller_mask = ~(HOST_WIDE_INT) 0;
6915
6916  /* Determine what bits of X are guaranteed to be (non)zero.  */
6917  nonzero = nonzero_bits (x, mode);
6918
6919  /* If none of the bits in X are needed, return a zero.  */
6920  if (! just_select && (nonzero & mask) == 0)
6921    x = const0_rtx;
6922
6923  /* If X is a CONST_INT, return a new one.  Do this here since the
6924     test below will fail.  */
6925  if (GET_CODE (x) == CONST_INT)
6926    {
6927      if (SCALAR_INT_MODE_P (mode))
6928        return gen_int_mode (INTVAL (x) & mask, mode);
6929      else
6930	{
6931	  x = GEN_INT (INTVAL (x) & mask);
6932	  return gen_lowpart_common (mode, x);
6933	}
6934    }
6935
6936  /* If X is narrower than MODE and we want all the bits in X's mode, just
6937     get X in the proper mode.  */
6938  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6939      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
6940    return gen_lowpart_for_combine (mode, x);
6941
6942  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6943     MASK are already known to be zero in X, we need not do anything.  */
6944  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
6945    return x;
6946
6947  switch (code)
6948    {
6949    case CLOBBER:
6950      /* If X is a (clobber (const_int)), return it since we know we are
6951	 generating something that won't match.  */
6952      return x;
6953
6954    case USE:
6955      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6956	 spanned the boundary of the MEM.  If we are now masking so it is
6957	 within that boundary, we don't need the USE any more.  */
6958      if (! BITS_BIG_ENDIAN
6959	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6960	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6961      break;
6962
6963    case SIGN_EXTEND:
6964    case ZERO_EXTEND:
6965    case ZERO_EXTRACT:
6966    case SIGN_EXTRACT:
6967      x = expand_compound_operation (x);
6968      if (GET_CODE (x) != code)
6969	return force_to_mode (x, mode, mask, reg, next_select);
6970      break;
6971
6972    case REG:
6973      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6974		       || rtx_equal_p (reg, get_last_value (x))))
6975	x = reg;
6976      break;
6977
6978    case SUBREG:
6979      if (subreg_lowpart_p (x)
6980	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6981	     if the constant masks to zero all the bits the mode doesn't
6982	     have.  */
6983	  && ((GET_MODE_SIZE (GET_MODE (x))
6984	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6985	      || (0 == (mask
6986			& GET_MODE_MASK (GET_MODE (x))
6987			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6988	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6989      break;
6990
6991    case AND:
6992      /* If this is an AND with a constant, convert it into an AND
6993	 whose constant is the AND of that constant with MASK.  If it
6994	 remains an AND of MASK, delete it since it is redundant.  */
6995
6996      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6997	{
6998	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6999				      mask & INTVAL (XEXP (x, 1)));
7000
7001	  /* If X is still an AND, see if it is an AND with a mask that
7002	     is just some low-order bits.  If so, and it is MASK, we don't
7003	     need it.  */
7004
7005	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7006	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
7007		  == mask))
7008	    x = XEXP (x, 0);
7009
7010	  /* If it remains an AND, try making another AND with the bits
7011	     in the mode mask that aren't in MASK turned on.  If the
7012	     constant in the AND is wide enough, this might make a
7013	     cheaper constant.  */
7014
7015	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
7016	      && GET_MODE_MASK (GET_MODE (x)) != mask
7017	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
7018	    {
7019	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
7020				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
7021	      int width = GET_MODE_BITSIZE (GET_MODE (x));
7022	      rtx y;
7023
7024	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
7025		 number, sign extend it.  */
7026	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7027		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7028		cval |= (HOST_WIDE_INT) -1 << width;
7029
7030	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
7031	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
7032		x = y;
7033	    }
7034
7035	  break;
7036	}
7037
7038      goto binop;
7039
7040    case PLUS:
7041      /* In (and (plus FOO C1) M), if M is a mask that just turns off
7042	 low-order bits (as in an alignment operation) and FOO is already
7043	 aligned to that boundary, mask C1 to that boundary as well.
7044	 This may eliminate that PLUS and, later, the AND.  */
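      /* For example, if FOO is known to be 8-byte aligned, then under
	 MASK == -8 the expression (plus:SI FOO (const_int 11)) is
	 treated as (plus:SI FOO (const_int 8)); the low three bits of
	 the addend cannot survive the mask.  */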
7045
7046      {
7047	unsigned int width = GET_MODE_BITSIZE (mode);
7048	unsigned HOST_WIDE_INT smask = mask;
7049
7050	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7051	   number, sign extend it.  */
7052
7053	if (width < HOST_BITS_PER_WIDE_INT
7054	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7055	  smask |= (HOST_WIDE_INT) -1 << width;
7056
7057	if (GET_CODE (XEXP (x, 1)) == CONST_INT
7058	    && exact_log2 (- smask) >= 0
7059	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7060	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7061	  return force_to_mode (plus_constant (XEXP (x, 0),
7062					       (INTVAL (XEXP (x, 1)) & smask)),
7063				mode, smask, reg, next_select);
7064      }
7065
7066      /* ... fall through ...  */
7067
7068    case MULT:
7069      /* For PLUS, MINUS and MULT, we need any bits less significant than the
7070	 most significant bit in MASK since carries from those bits will
7071	 affect the bits we are interested in.  */
7072      mask = fuller_mask;
7073      goto binop;
7074
7075    case MINUS:
7076      /* If X is (minus C Y) where C's least set bit is larger than any bit
7077	 in the mask, then we may replace with (neg Y).  */
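      /* For instance, with MASK == 15, (minus:SI (const_int 16) Y)
	 becomes (neg:SI Y); modulo 16 the two values agree.  */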
7078      if (GET_CODE (XEXP (x, 0)) == CONST_INT
7079	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7080					& -INTVAL (XEXP (x, 0))))
7081	      > mask))
7082	{
7083	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7084				  GET_MODE (x));
7085	  return force_to_mode (x, mode, mask, reg, next_select);
7086	}
7087
7088      /* Similarly, if C contains every bit in the fuller_mask, then we may
7089	 replace with (not Y).  */
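      /* The classic instance is C == -1: (minus:SI (const_int -1) Y)
	 is exactly (not:SI Y) in two's complement.  */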
7090      if (GET_CODE (XEXP (x, 0)) == CONST_INT
7091	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7092	      == INTVAL (XEXP (x, 0))))
7093	{
7094	  x = simplify_gen_unary (NOT, GET_MODE (x),
7095				  XEXP (x, 1), GET_MODE (x));
7096	  return force_to_mode (x, mode, mask, reg, next_select);
7097	}
7098
7099      mask = fuller_mask;
7100      goto binop;
7101
7102    case IOR:
7103    case XOR:
7104      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7105	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7106	 operation which may be a bitfield extraction.  Ensure that the
7107	 constant we form is not wider than the mode of X.  */
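      /* For example, with a full-width MASK in SImode (and the
	 nonzero_bits test below passing),
	 (ior:SI (lshiftrt:SI X (const_int 8)) (const_int 255)) becomes
	 (lshiftrt:SI (ior:SI X (const_int 65280)) (const_int 8)).  */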
7108
7109      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7110	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7111	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7112	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7113	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7114	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
7115	       + floor_log2 (INTVAL (XEXP (x, 1))))
7116	      < GET_MODE_BITSIZE (GET_MODE (x)))
7117	  && (INTVAL (XEXP (x, 1))
7118	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7119	{
7120	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7121			  << INTVAL (XEXP (XEXP (x, 0), 1)));
7122	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
7123			     XEXP (XEXP (x, 0), 0), temp);
7124	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
7125			  XEXP (XEXP (x, 0), 1));
7126	  return force_to_mode (x, mode, mask, reg, next_select);
7127	}
7128
7129    binop:
7130      /* For most binary operations, just propagate into the operation and
7131	 change the mode if we have an operation of that mode.  */
7132
7133      op0 = gen_lowpart_for_combine (op_mode,
7134				     force_to_mode (XEXP (x, 0), mode, mask,
7135						    reg, next_select));
7136      op1 = gen_lowpart_for_combine (op_mode,
7137				     force_to_mode (XEXP (x, 1), mode, mask,
7138						    reg, next_select));
7139
7140      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7141	x = gen_binary (code, op_mode, op0, op1);
7142      break;
7143
7144    case ASHIFT:
7145      /* For left shifts, do the same, but just for the first operand.
7146	 However, we cannot do anything with shifts where we cannot
7147	 guarantee that the counts are smaller than the size of the mode
7148	 because such a count will have a different meaning in a
7149	 wider mode.  */
7150
7151      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
7152	     && INTVAL (XEXP (x, 1)) >= 0
7153	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7154	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7155		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7156		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7157	break;
7158
7159      /* If the shift count is a constant and we can do arithmetic in
7160	 the mode of the shift, refine which bits we need.  Otherwise, use the
7161	 conservative form of the mask.  */
7162      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7163	  && INTVAL (XEXP (x, 1)) >= 0
7164	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7165	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7166	mask >>= INTVAL (XEXP (x, 1));
7167      else
7168	mask = fuller_mask;
7169
7170      op0 = gen_lowpart_for_combine (op_mode,
7171				     force_to_mode (XEXP (x, 0), op_mode,
7172						    mask, reg, next_select));
7173
7174      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7175	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
7176      break;
7177
7178    case LSHIFTRT:
7179      /* Here we can only do something if the shift count is a constant
7180	 that is valid for the host, and we can do arithmetic in
7181	 OP_MODE.  */
7182
7183      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7184	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7185	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7186	{
7187	  rtx inner = XEXP (x, 0);
7188	  unsigned HOST_WIDE_INT inner_mask;
7189
7190	  /* Select the mask of the bits we need for the shift operand.  */
7191	  inner_mask = mask << INTVAL (XEXP (x, 1));
7192
7193	  /* We can only change the mode of the shift if we can do arithmetic
7194	     in the mode of the shift and INNER_MASK is no wider than the
7195	     width of OP_MODE.  */
7196	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
7197	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
7198	    op_mode = GET_MODE (x);
7199
7200	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
7201
7202	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7203	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7204	}
7205
7206      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7207	 shift and AND produces only copies of the sign bit (C2 is one less
7208	 than a power of two), we can do this with just a shift.  */
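      /* For example, in SImode with MASK == 1, if the top two bits of
	 FOO are known to be equal (say FOO is a sign-extension), then
	 (lshiftrt:SI FOO (const_int 30)) is rewritten as
	 (lshiftrt:SI FOO (const_int 31)); both leave a copy of the sign
	 bit in bit 0.  */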
7209
7210      if (GET_CODE (x) == LSHIFTRT
7211	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7212	  /* The shift puts one of the sign bit copies in the least significant
7213	     bit.  */
7214	  && ((INTVAL (XEXP (x, 1))
7215	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7216	      >= GET_MODE_BITSIZE (GET_MODE (x)))
7217	  && exact_log2 (mask + 1) >= 0
7218	  /* Number of bits left after the shift must be more than the mask
7219	     needs.  */
7220	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7221	      <= GET_MODE_BITSIZE (GET_MODE (x)))
7222	  /* Must be more sign bit copies than the mask needs.  */
7223	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7224	      >= exact_log2 (mask + 1)))
7225	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7226			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7227				 - exact_log2 (mask + 1)));
7228
7229      goto shiftrt;
7230
7231    case ASHIFTRT:
7232      /* If we are just looking for the sign bit, we don't need this shift at
7233	 all, even if it has a variable count.  */
7234      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7235	  && (mask == ((unsigned HOST_WIDE_INT) 1
7236		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7237	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7238
7239      /* If this is a shift by a constant, get a mask that contains those bits
7240	 that are not copies of the sign bit.  We then have two cases:  If
7241	 MASK only includes those bits, this can be a logical shift, which may
7242	 allow simplifications.  If MASK is a single-bit field not within
7243	 those bits, we are requesting a copy of the sign bit and hence can
7244	 shift the sign bit to the appropriate location.  */
7245
7246      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
7247	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7248	{
7249	  int i = -1;
7250
7251	  /* If the considered data is wider than HOST_WIDE_INT, we can't
7252	     represent a mask for all its bits in a single scalar.
7253	     But we only care about the lower bits, so calculate these.  */
7254
7255	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7256	    {
7257	      nonzero = ~(HOST_WIDE_INT) 0;
7258
7259	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7260		 is the number of bits a full-width mask would have set.
7261		 We need only shift if these are fewer than nonzero can
7262		 hold.  If not, we must keep all bits set in nonzero.  */
7263
7264	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7265		  < HOST_BITS_PER_WIDE_INT)
7266		nonzero >>= INTVAL (XEXP (x, 1))
7267			    + HOST_BITS_PER_WIDE_INT
7268			    - GET_MODE_BITSIZE (GET_MODE (x));
7269	    }
7270	  else
7271	    {
7272	      nonzero = GET_MODE_MASK (GET_MODE (x));
7273	      nonzero >>= INTVAL (XEXP (x, 1));
7274	    }
7275
7276	  if ((mask & ~nonzero) == 0
7277	      || (i = exact_log2 (mask)) >= 0)
7278	    {
7279	      x = simplify_shift_const
7280		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7281		 i < 0 ? INTVAL (XEXP (x, 1))
7282		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7283
7284	      if (GET_CODE (x) != ASHIFTRT)
7285		return force_to_mode (x, mode, mask, reg, next_select);
7286	    }
7287	}
7288
7289      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7290	 even if the shift count isn't a constant.  */
7291      if (mask == 1)
7292	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
7293
7294    shiftrt:
7295
7296      /* If this is a zero- or sign-extension operation that just affects bits
7297	 we don't care about, remove it.  Be sure the call above returned
7298	 something that is still a shift.  */
7299
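      /* For example (illustrative), (lshiftrt (ashift Z 24) 24) in SImode
	 zero-extends the low byte of Z; with MASK == 0xf the extension
	 affects only bits we ignore (24 <= 32 - (floor_log2 (0xf) + 1)),
	 so we may look through it and recurse on Z directly.  */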
7300      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7301	  && GET_CODE (XEXP (x, 1)) == CONST_INT
7302	  && INTVAL (XEXP (x, 1)) >= 0
7303	  && (INTVAL (XEXP (x, 1))
7304	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7305	  && GET_CODE (XEXP (x, 0)) == ASHIFT
7306	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7307	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
7308	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7309			      reg, next_select);
7310
7311      break;
7312
7313    case ROTATE:
7314    case ROTATERT:
7315      /* If the shift count is constant and we can do computations
7316	 in the mode of X, compute where the bits we care about are.
7317	 Otherwise, we can't do anything.  Don't change the mode of
7318	 the shift or propagate MODE into the shift, though.  */
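      /* Sketch with invented values: for (rotate Z 8) in SImode with
	 MASK == 0xff00, bit J of the result is bit J - 8 of Z, so the
	 bits of Z that matter are (rotatert 0xff00 8) == 0xff; that
	 constant becomes the mask for the recursive call below.  */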
7319      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7320	  && INTVAL (XEXP (x, 1)) >= 0)
7321	{
7322	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7323					    GET_MODE (x), GEN_INT (mask),
7324					    XEXP (x, 1));
7325	  if (temp && GET_CODE (temp) == CONST_INT)
7326	    SUBST (XEXP (x, 0),
7327		   force_to_mode (XEXP (x, 0), GET_MODE (x),
7328				  INTVAL (temp), reg, next_select));
7329	}
7330      break;
7331
7332    case NEG:
7333      /* If we just want the low-order bit, the NEG isn't needed since it
7334	 won't change the low-order bit.  */
7335      if (mask == 1)
7336	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
7337
7338      /* We need any bits less significant than the most significant bit in
7339	 MASK since carries from those bits will affect the bits we are
7340	 interested in.  */
7341      mask = fuller_mask;
7342      goto unop;
7343
7344    case NOT:
7345      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
7346	 same as the XOR case above.  Ensure that the constant we form is not
7347	 wider than the mode of X.  */
7348
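      /* For instance (values invented for exposition), with MASK == 0xf
	 in SImode, (not (lshiftrt Y 28)) becomes
	 (lshiftrt (xor Y 0xf0000000) 28): flipping bits 31..28 of Y
	 before the shift flips bits 3..0 afterwards, which is all that
	 MASK sees.  */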
7349      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7350	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7351	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7352	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
7353	      < GET_MODE_BITSIZE (GET_MODE (x)))
7354	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7355	{
7356	  temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
7357			       GET_MODE (x));
7358	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
7359	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
7360
7361	  return force_to_mode (x, mode, mask, reg, next_select);
7362	}
7363
7364      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
7365	 use the full mask inside the NOT.  */
7366      mask = fuller_mask;
7367
7368    unop:
7369      op0 = gen_lowpart_for_combine (op_mode,
7370				     force_to_mode (XEXP (x, 0), mode, mask,
7371						    reg, next_select));
7372      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7373	x = simplify_gen_unary (code, op_mode, op0, op_mode);
7374      break;
7375
7376    case NE:
7377      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
7378	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
7379	 which is equal to STORE_FLAG_VALUE.  */
7380      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7381	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
7382	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
7383	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
7384
7385      break;
7386
7387    case IF_THEN_ELSE:
7388      /* We have no way of knowing if the IF_THEN_ELSE can itself be
7389	 written in a narrower mode.  We play it safe and do not do so.  */
7390
7391      SUBST (XEXP (x, 1),
7392	     gen_lowpart_for_combine (GET_MODE (x),
7393				      force_to_mode (XEXP (x, 1), mode,
7394						     mask, reg, next_select)));
7395      SUBST (XEXP (x, 2),
7396	     gen_lowpart_for_combine (GET_MODE (x),
7397				      force_to_mode (XEXP (x, 2), mode,
7398						     mask, reg, next_select)));
7399      break;
7400
7401    default:
7402      break;
7403    }
7404
7405  /* Ensure we return a value of the proper mode.  */
7406  return gen_lowpart_for_combine (mode, x);
7407}
7408
7409/* Return nonzero if X is an expression that has one of two values depending on
7410   whether some other value is zero or nonzero.  In that case, we return the
7411   value that is being tested; *PTRUE is set to the value X has when the
7412   rtx being returned is nonzero, and *PFALSE is set to the other alternative.
7413
7414   If we return zero, we set *PTRUE and *PFALSE to X.  */
7415
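/* Usage sketch (assuming STORE_FLAG_VALUE == 1): for
   X == (neg (ne (reg A) 0)), the unary case below recurses into the NE,
   which yields A with const1_rtx and const0_rtx; applying NEG to both
   gives *PTRUE == constm1_rtx and *PFALSE == const0_rtx, and A is
   returned as the value being tested.  */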
7416static rtx
7417if_then_else_cond (x, ptrue, pfalse)
7418     rtx x;
7419     rtx *ptrue, *pfalse;
7420{
7421  enum machine_mode mode = GET_MODE (x);
7422  enum rtx_code code = GET_CODE (x);
7423  rtx cond0, cond1, true0, true1, false0, false1;
7424  unsigned HOST_WIDE_INT nz;
7425
7426  /* If we are comparing a value against zero, we are done.  */
7427  if ((code == NE || code == EQ)
7428      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
7429    {
7430      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
7431      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
7432      return XEXP (x, 0);
7433    }
7434
7435  /* If this is a unary operation whose operand has one of two values, apply
7436     our opcode to compute those values.  */
7437  else if (GET_RTX_CLASS (code) == '1'
7438	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
7439    {
7440      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
7441      *pfalse = simplify_gen_unary (code, mode, false0,
7442				    GET_MODE (XEXP (x, 0)));
7443      return cond0;
7444    }
7445
7446  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
7447     make can't possibly match and would suppress other optimizations.  */
7448  else if (code == COMPARE)
7449    ;
7450
7451  /* If this is a binary operation, see if either side has only one of two
7452     values.  If either one does or if both do and they are conditional on
7453     the same value, compute the new true and false values.  */
7454  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
7455	   || GET_RTX_CLASS (code) == '<')
7456    {
7457      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
7458      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
7459
7460      if ((cond0 != 0 || cond1 != 0)
7461	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
7462	{
7463	  /* If if_then_else_cond returned zero, then true/false are the
7464	     same rtl.  We must copy one of them to prevent invalid rtl
7465	     sharing.  */
7466	  if (cond0 == 0)
7467	    true0 = copy_rtx (true0);
7468	  else if (cond1 == 0)
7469	    true1 = copy_rtx (true1);
7470
7471	  *ptrue = gen_binary (code, mode, true0, true1);
7472	  *pfalse = gen_binary (code, mode, false0, false1);
7473	  return cond0 ? cond0 : cond1;
7474	}
7475
7476      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
7477	 operands is zero when the other is nonzero, and vice-versa,
7478	 and STORE_FLAG_VALUE is 1 or -1.  */
7479
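      /* For example (illustrative, assuming STORE_FLAG_VALUE == 1),
	 (plus (mult (eq A B) C) (mult (ne A B) D)) is C when A == B and
	 D otherwise, since exactly one of the comparisons is 1; we
	 return (eq A B) with *PTRUE == (mult C 1) and
	 *PFALSE == (mult D 1).  */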
7480      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7481	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
7482	      || code == UMAX)
7483	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7484	{
7485	  rtx op0 = XEXP (XEXP (x, 0), 1);
7486	  rtx op1 = XEXP (XEXP (x, 1), 1);
7487
7488	  cond0 = XEXP (XEXP (x, 0), 0);
7489	  cond1 = XEXP (XEXP (x, 1), 0);
7490
7491	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7492	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7493	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7494		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7495		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7496		  || ((swap_condition (GET_CODE (cond0))
7497		       == combine_reversed_comparison_code (cond1))
7498		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7499		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7500	      && ! side_effects_p (x))
7501	    {
7502	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
7503	      *pfalse = gen_binary (MULT, mode,
7504				    (code == MINUS
7505				     ? simplify_gen_unary (NEG, mode, op1,
7506							   mode)
7507				     : op1),
7508				    const_true_rtx);
7509	      return cond0;
7510	    }
7511	}
7512
7513      /* Similarly for MULT, AND and UMIN, except that for these the result
7514	 is always zero.  */
7515      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7516	  && (code == MULT || code == AND || code == UMIN)
7517	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
7518	{
7519	  cond0 = XEXP (XEXP (x, 0), 0);
7520	  cond1 = XEXP (XEXP (x, 1), 0);
7521
7522	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
7523	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
7524	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
7525		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
7526		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
7527		  || ((swap_condition (GET_CODE (cond0))
7528		       == combine_reversed_comparison_code (cond1))
7529		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
7530		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
7531	      && ! side_effects_p (x))
7532	    {
7533	      *ptrue = *pfalse = const0_rtx;
7534	      return cond0;
7535	    }
7536	}
7537    }
7538
7539  else if (code == IF_THEN_ELSE)
7540    {
7541      /* If we have IF_THEN_ELSE already, extract the condition and
7542	 canonicalize it if it is NE or EQ.  */
7543      cond0 = XEXP (x, 0);
7544      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
7545      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
7546	return XEXP (cond0, 0);
7547      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
7548	{
7549	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
7550	  return XEXP (cond0, 0);
7551	}
7552      else
7553	return cond0;
7554    }
7555
7556  /* If X is a SUBREG, we can narrow both the true and false values
7557     of the inner expression, if there is a condition.  */
7558  else if (code == SUBREG
7559	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7560					       &true0, &false0)))
7561    {
7562      *ptrue = simplify_gen_subreg (mode, true0,
7563				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7564      *pfalse = simplify_gen_subreg (mode, false0,
7565				     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7566
7567      return cond0;
7568    }
7569
7570  /* If X is a constant, this isn't special and will cause confusion
7571     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7572  else if (CONSTANT_P (x)
7573	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7574    ;
7575
7576  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7577     will be least confusing to the rest of the compiler.  */
7578  else if (mode == BImode)
7579    {
7580      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7581      return x;
7582    }
7583
7584  /* If X is known to be either 0 or -1, those are the true and
7585     false values when testing X.  */
7586  else if (x == constm1_rtx || x == const0_rtx
7587	   || (mode != VOIDmode
7588	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7589    {
7590      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7591      return x;
7592    }
7593
7594  /* Likewise for 0 or a single bit.  */
7595  else if (mode != VOIDmode
7596	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7597	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7598    {
7599      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7600      return x;
7601    }
7602
7603  /* Otherwise fail; show no condition with true and false values the same.  */
7604  *ptrue = *pfalse = x;
7605  return 0;
7606}
7607
7608/* Return the value of expression X given the fact that condition COND
7609   is known to be true when applied to REG as its first operand and VAL
7610   as its second.  X is known to not be shared and so can be modified in
7611   place.
7612
7613   We only handle the simplest cases, and specifically those cases that
7614   arise with IF_THEN_ELSE expressions.  */
7615
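/* Two illustrative cases: known_cond ((abs R), GE, R, 0) folds to R,
   since R >= 0 implies (abs R) == R; and known_cond ((smax R V), LT, R, V)
   folds to V, since R < V implies (smax R V) == V.  */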
7616static rtx
7617known_cond (x, cond, reg, val)
7618     rtx x;
7619     enum rtx_code cond;
7620     rtx reg, val;
7621{
7622  enum rtx_code code = GET_CODE (x);
7623  rtx temp;
7624  const char *fmt;
7625  int i, j;
7626
7627  if (side_effects_p (x))
7628    return x;
7629
7630  /* If either operand of the condition is a floating point value,
7631     then we have to avoid collapsing an EQ comparison.  */
7632  if (cond == EQ
7633      && rtx_equal_p (x, reg)
7634      && ! FLOAT_MODE_P (GET_MODE (x))
7635      && ! FLOAT_MODE_P (GET_MODE (val)))
7636    return val;
7637
7638  if (cond == UNEQ && rtx_equal_p (x, reg))
7639    return val;
7640
7641  /* If X is (abs REG) and we know something about REG's relationship
7642     with zero, we may be able to simplify this.  */
7643
7644  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7645    switch (cond)
7646      {
7647      case GE:  case GT:  case EQ:
7648	return XEXP (x, 0);
7649      case LT:  case LE:
7650	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7651				   XEXP (x, 0),
7652				   GET_MODE (XEXP (x, 0)));
7653      default:
7654	break;
7655      }
7656
7657  /* The only other cases we handle are MIN, MAX, and comparisons if the
7658     operands are the same as REG and VAL.  */
7659
7660  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7661    {
7662      if (rtx_equal_p (XEXP (x, 0), val))
7663	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7664
7665      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7666	{
7667	  if (GET_RTX_CLASS (code) == '<')
7668	    {
7669	      if (comparison_dominates_p (cond, code))
7670		return const_true_rtx;
7671
7672	      code = combine_reversed_comparison_code (x);
7673	      if (code != UNKNOWN
7674		  && comparison_dominates_p (cond, code))
7675		return const0_rtx;
7676	      else
7677		return x;
7678	    }
7679	  else if (code == SMAX || code == SMIN
7680		   || code == UMIN || code == UMAX)
7681	    {
7682	      int unsignedp = (code == UMIN || code == UMAX);
7683
7684	      /* Do not reverse the condition when it is NE or EQ.
7685		 This is because we cannot conclude anything about
7686		 the value of 'SMAX (x, y)' when x is not equal to y,
7687		 but we can when x equals y.  */
7688	      if ((code == SMAX || code == UMAX)
7689		  && ! (cond == EQ || cond == NE))
7690		cond = reverse_condition (cond);
7691
7692	      switch (cond)
7693		{
7694		case GE:   case GT:
7695		  return unsignedp ? x : XEXP (x, 1);
7696		case LE:   case LT:
7697		  return unsignedp ? x : XEXP (x, 0);
7698		case GEU:  case GTU:
7699		  return unsignedp ? XEXP (x, 1) : x;
7700		case LEU:  case LTU:
7701		  return unsignedp ? XEXP (x, 0) : x;
7702		default:
7703		  break;
7704		}
7705	    }
7706	}
7707    }
7708  else if (code == SUBREG)
7709    {
7710      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7711      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7712
7713      if (SUBREG_REG (x) != r)
7714	{
7715	  /* We must simplify subreg here, before we lose track of the
7716	     original inner_mode.  */
7717	  new = simplify_subreg (GET_MODE (x), r,
7718				 inner_mode, SUBREG_BYTE (x));
7719	  if (new)
7720	    return new;
7721	  else
7722	    SUBST (SUBREG_REG (x), r);
7723	}
7724
7725      return x;
7726    }
7727  /* We don't have to handle SIGN_EXTEND here, because even in the
7728     case of replacing something with a modeless CONST_INT, a
7729     CONST_INT is already (supposed to be) a valid sign extension for
7730     its narrower mode, which implies it's already properly
7731     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7732     story is different.  */
7733  else if (code == ZERO_EXTEND)
7734    {
7735      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7736      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7737
7738      if (XEXP (x, 0) != r)
7739	{
7740	  /* We must simplify the zero_extend here, before we lose
7741             track of the original inner_mode.  */
7742	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7743					  r, inner_mode);
7744	  if (new)
7745	    return new;
7746	  else
7747	    SUBST (XEXP (x, 0), r);
7748	}
7749
7750      return x;
7751    }
7752
7753  fmt = GET_RTX_FORMAT (code);
7754  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7755    {
7756      if (fmt[i] == 'e')
7757	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7758      else if (fmt[i] == 'E')
7759	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7760	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7761						cond, reg, val));
7762    }
7763
7764  return x;
7765}
7766
7767/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7768   assignment as a field assignment.  */
7769
7770static int
7771rtx_equal_for_field_assignment_p (x, y)
7772     rtx x;
7773     rtx y;
7774{
7775  if (x == y || rtx_equal_p (x, y))
7776    return 1;
7777
7778  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7779    return 0;
7780
7781  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7782     Note that all SUBREGs of MEM are paradoxical; otherwise they
7783     would have been rewritten.  */
7784  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7785      && GET_CODE (SUBREG_REG (y)) == MEM
7786      && rtx_equal_p (SUBREG_REG (y),
7787		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7788    return 1;
7789
7790  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7791      && GET_CODE (SUBREG_REG (x)) == MEM
7792      && rtx_equal_p (SUBREG_REG (x),
7793		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7794    return 1;
7795
7796  /* We used to see if get_last_value of X and Y were the same but that's
7797     not correct.  In one direction, we'll cause the assignment to have
7798     the wrong destination and in the other, we'll import a register into
7799     this insn that might already have been dead.  So fail if none of the
7800     above cases are true.  */
7801  return 0;
7802}
7803
7804/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7805   Return that assignment if so.
7806
7807   We only handle the most common cases.  */
7808
7809static rtx
7810make_field_assignment (x)
7811     rtx x;
7812{
7813  rtx dest = SET_DEST (x);
7814  rtx src = SET_SRC (x);
7815  rtx assign;
7816  rtx rhs, lhs;
7817  HOST_WIDE_INT c1;
7818  HOST_WIDE_INT pos;
7819  unsigned HOST_WIDE_INT len;
7820  rtx other;
7821  enum machine_mode mode;
7822
7823  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7824     a clear of a one-bit field.  We will have changed it to
7825     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7826     for a SUBREG.  */
7827
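  /* The rewrite relies on ~(1 << POS) being (rotate -2 POS): -2 is all
     ones except bit 0, and the rotate moves that single zero to bit POS
     (e.g. in QImode, (rotate -2 3) is 0xf7, which is ~(1 << 3)).  So the
     AND clears exactly bit POS of DEST, i.e. it sets a one-bit field
     to zero.  */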
7828  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7829      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7830      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7831      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7832    {
7833      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7834				1, 1, 1, 0);
7835      if (assign != 0)
7836	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7837      return x;
7838    }
7839
7840  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7841	   && subreg_lowpart_p (XEXP (src, 0))
7842	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7843	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7844	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7845	   && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7846	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7847	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7848    {
7849      assign = make_extraction (VOIDmode, dest, 0,
7850				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7851				1, 1, 1, 0);
7852      if (assign != 0)
7853	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7854      return x;
7855    }
7856
7857  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7858     one-bit field.  */
7859  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7860	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7861	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7862    {
7863      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7864				1, 1, 1, 0);
7865      if (assign != 0)
7866	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7867      return x;
7868    }
7869
7870  /* The other case we handle is assignments into a constant-position
7871     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7872     a mask that has all one bits except for a group of zero bits and
7873     OTHER is known to have zeros where C1 has ones, this is such an
7874     assignment.  Compute the position and length from C1.  Shift OTHER
7875     to the appropriate position, force it to the required mode, and
7876     make the extraction.  Check for the AND in both operands.  */
7877
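  /* Worked example (values invented for exposition): in SImode, for
     (set DEST (ior (and DEST 0xffff00ff) (ashift Y 8))) with the nonzero
     bits of (ashift Y 8) inside 0xff00, ~C1 gives POS == 8 and LEN == 8,
     so the result is (set (zero_extract DEST 8 8) ...) with the shifted,
     masked form of Y as the new source.  */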
7878  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7879    return x;
7880
7881  rhs = expand_compound_operation (XEXP (src, 0));
7882  lhs = expand_compound_operation (XEXP (src, 1));
7883
7884  if (GET_CODE (rhs) == AND
7885      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7886      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7887    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7888  else if (GET_CODE (lhs) == AND
7889	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7890	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7891    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7892  else
7893    return x;
7894
7895  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7896  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7897      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7898      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7899    return x;
7900
7901  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7902  if (assign == 0)
7903    return x;
7904
7905  /* The mode to use for the source is the mode of the assignment, or of
7906     what is inside a possible STRICT_LOW_PART.  */
7907  mode = (GET_CODE (assign) == STRICT_LOW_PART
7908	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7909
7910  /* Shift OTHER right POS places and make it the source, restricting it
7911     to the proper length and mode.  */
7912
7913  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7914					     GET_MODE (src), other, pos),
7915		       mode,
7916		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7917		       ? ~(unsigned HOST_WIDE_INT) 0
7918		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7919		       dest, 0);
7920
7921  return gen_rtx_SET (VOIDmode, assign, src);
7922}
7923
7924/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7925   if so.  */
7926
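/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C):
   AND distributes over IOR, so pulling the common operand C outward
   exposes (ior A B) to further simplification.  */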
7927static rtx
7928apply_distributive_law (x)
7929     rtx x;
7930{
7931  enum rtx_code code = GET_CODE (x);
7932  rtx lhs, rhs, other;
7933  rtx tem;
7934  enum rtx_code inner_code;
7935
7936  /* Distributivity is not true for floating point.
7937     It can change the value.  So don't do it.
7938     -- rms and moshier@world.std.com.  */
7939  if (FLOAT_MODE_P (GET_MODE (x)))
7940    return x;
7941
7942  /* The outer operation can only be one of the following:  */
7943  if (code != IOR && code != AND && code != XOR
7944      && code != PLUS && code != MINUS)
7945    return x;
7946
7947  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7948
7949  /* If either operand is a primitive we can't do anything, so get out
7950     fast.  */
7951  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7952      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7953    return x;
7954
7955  lhs = expand_compound_operation (lhs);
7956  rhs = expand_compound_operation (rhs);
7957  inner_code = GET_CODE (lhs);
7958  if (inner_code != GET_CODE (rhs))
7959    return x;
7960
7961  /* See if the inner and outer operations distribute.  */
7962  switch (inner_code)
7963    {
7964    case LSHIFTRT:
7965    case ASHIFTRT:
7966    case AND:
7967    case IOR:
7968      /* These all distribute except over PLUS.  */
7969      if (code == PLUS || code == MINUS)
7970	return x;
7971      break;
7972
7973    case MULT:
7974      if (code != PLUS && code != MINUS)
7975	return x;
7976      break;
7977
7978    case ASHIFT:
7979      /* This is also a multiply, so it distributes over everything.  */
7980      break;
7981
7982    case SUBREG:
7983      /* Non-paradoxical SUBREGs distribute over all operations, provided
7984	 the inner modes and byte offsets are the same, this is an extraction
7985	 of a low-order part, we don't convert an fp operation to int or
7986	 vice versa, and we would not be converting a single-word
7987	 operation into a multi-word operation.  The latter test is not
7988	 required, but it prevents generating unneeded multi-word operations.
7989	 Some of the previous tests are redundant given the latter test, but
7990	 are retained because they are required for correctness.
7991
7992	 We produce the result slightly differently in this case.  */
7993
7994      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7995	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7996	  || ! subreg_lowpart_p (lhs)
7997	  || (GET_MODE_CLASS (GET_MODE (lhs))
7998	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7999	  || (GET_MODE_SIZE (GET_MODE (lhs))
8000	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8001	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
8002	return x;
8003
8004      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8005			SUBREG_REG (lhs), SUBREG_REG (rhs));
8006      return gen_lowpart_for_combine (GET_MODE (x), tem);
8007
8008    default:
8009      return x;
8010    }
8011
8012  /* Set LHS and RHS to the inner operands (A and B in the example
8013     above) and set OTHER to the common operand (C in the example).
8014     There is only one way to do this unless the inner operation is
8015     commutative.  */
8016  if (GET_RTX_CLASS (inner_code) == 'c'
8017      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8018    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8019  else if (GET_RTX_CLASS (inner_code) == 'c'
8020	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8021    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8022  else if (GET_RTX_CLASS (inner_code) == 'c'
8023	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8024    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8025  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8026    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8027  else
8028    return x;
8029
8030  /* Form the new inner operation, seeing if it simplifies first.  */
8031  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
8032
8033  /* There is one exception to the general way of distributing:
8034     (a | b) ^ (a | c) -> (b ^ c) & (~a)  */
8035  if (code == XOR && inner_code == IOR)
8036    {
8037      inner_code = AND;
8038      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8039    }
8040
8041  /* We may be able to continue distributing the result, so call
8042     ourselves recursively on the inner operation before forming the
8043     outer operation, which we return.  */
8044  return gen_binary (inner_code, GET_MODE (x),
8045		     apply_distributive_law (tem), other);
8046}
8047
8048/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8049   in MODE.
8050
8051   Return an equivalent form, if different from X.  Otherwise, return X.  If
8052   X is zero, we are to always construct the equivalent form.  */
8053
8054static rtx
8055simplify_and_const_int (x, mode, varop, constop)
8056     rtx x;
8057     enum machine_mode mode;
8058     rtx varop;
8059     unsigned HOST_WIDE_INT constop;
8060{
8061  unsigned HOST_WIDE_INT nonzero;
8062  int i;
8063
8064  /* Simplify VAROP knowing that we will be only looking at some of the
8065     bits in it.
8066
8067     Note by passing in CONSTOP, we guarantee that the bits not set in
8068     CONSTOP are not significant and will never be examined.  We must
8069     ensure that is the case by explicitly masking out those bits
8070     before returning.  */
8071  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
8072
8073  /* If VAROP is a CLOBBER, we will fail so return it.  */
8074  if (GET_CODE (varop) == CLOBBER)
8075    return varop;
8076
8077  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8078     to VAROP and return the new constant.  */
8079  if (GET_CODE (varop) == CONST_INT)
8080    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
8081
8082  /* See what bits may be nonzero in VAROP.  Unlike the general case of
8083     a call to nonzero_bits, here we don't care about bits outside
8084     MODE.  */
8085
8086  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8087
8088  /* Turn off all bits in the constant that are known to already be zero.
8089     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8090     which is tested below.  */
8091
8092  constop &= nonzero;
8093
8094  /* If we don't have any bits left, return zero.  */
8095  if (constop == 0)
8096    return const0_rtx;
8097
8098  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8099     a power of two, we can replace this with an ASHIFT.  */
8100  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8101      && (i = exact_log2 (constop)) >= 0)
8102    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8103
8104  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8105     or XOR, then try to apply the distributive law.  This may eliminate
8106     operations if either branch can be simplified because of the AND.
8107     It may also make some cases more complex, but those cases probably
8108     won't match a pattern either with or without this.  */
8109
8110  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8111    return
8112      gen_lowpart_for_combine
8113	(mode,
8114	 apply_distributive_law
8115	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
8116		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
8117					      XEXP (varop, 0), constop),
8118		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
8119					      XEXP (varop, 1), constop))));
8120
8121  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8122     the AND and see if one of the operands simplifies to zero.  If so, we
8123     may eliminate it.  */
8124
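  /* For instance, (and (plus X 16) 15) reduces to (and X 15): the mask
     covers only the low four bits and 16 contributes nothing below bit 4,
     so (X + 16) mod 16 == X mod 16 and O1 below becomes zero.  */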
8125  if (GET_CODE (varop) == PLUS
8126      && exact_log2 (constop + 1) >= 0)
8127    {
8128      rtx o0, o1;
8129
8130      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8131      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8132      if (o0 == const0_rtx)
8133	return o1;
8134      if (o1 == const0_rtx)
8135	return o0;
8136    }
8137
8138  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
8139     if we already had one (just check for the simplest cases).  */
8140  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8141      && GET_MODE (XEXP (x, 0)) == mode
8142      && SUBREG_REG (XEXP (x, 0)) == varop)
8143    varop = XEXP (x, 0);
8144  else
8145    varop = gen_lowpart_for_combine (mode, varop);
8146
8147  /* If we can't make the SUBREG, try to return what we were given.  */
8148  if (GET_CODE (varop) == CLOBBER)
8149    return x ? x : varop;
8150
8151  /* If we are only masking insignificant bits, return VAROP.  */
8152  if (constop == nonzero)
8153    x = varop;
8154  else
8155    {
8156      /* Otherwise, return an AND.  */
8157      constop = trunc_int_for_mode (constop, mode);
8158      /* See how much, if any, of X we can use.  */
8159      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8160	x = gen_binary (AND, mode, varop, GEN_INT (constop));
8161
8162      else
8163	{
8164	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
8165	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8166	    SUBST (XEXP (x, 1), GEN_INT (constop));
8167
8168	  SUBST (XEXP (x, 0), varop);
8169	}
8170    }
8171
8172  return x;
8173}
8174
8175#define nonzero_bits_with_known(X, MODE) \
8176  cached_nonzero_bits (X, MODE, known_x, known_mode, known_ret)
8177
8178/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
8179   It avoids exponential behavior in nonzero_bits1 when X has
8180   identical subexpressions on the first or the second level.  */
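/* For example, for X == (plus (reg A) (reg A)) the two operands are the
   same rtx, so the bits of A are computed once and passed down as
   KNOWN_X/KNOWN_RET; without this, nested shared operands would make
   nonzero_bits1 take time exponential in the expression depth.  */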
8181
8182static unsigned HOST_WIDE_INT
8183cached_nonzero_bits (x, mode, known_x, known_mode, known_ret)
8184     rtx x;
8185     enum machine_mode mode;
8186     rtx known_x;
8187     enum machine_mode known_mode;
8188     unsigned HOST_WIDE_INT known_ret;
8189{
8190  if (x == known_x && mode == known_mode)
8191    return known_ret;
8192
8193  /* Try to find identical subexpressions.  If found call
8194     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
8195     precomputed value for the subexpression as KNOWN_RET.  */
8196
8197  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8198      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8199    {
8200      rtx x0 = XEXP (x, 0);
8201      rtx x1 = XEXP (x, 1);
8202
8203      /* Check the first level.  */
8204      if (x0 == x1)
8205	return nonzero_bits1 (x, mode, x0, mode,
8206			      nonzero_bits_with_known (x0, mode));
8207
8208      /* Check the second level.  */
8209      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8210	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8211	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8212	return nonzero_bits1 (x, mode, x1, mode,
8213			      nonzero_bits_with_known (x1, mode));
8214
8215      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8216	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8217	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8218	return nonzero_bits1 (x, mode, x0, mode,
8219			      nonzero_bits_with_known (x0, mode));
8220    }
8221
8222  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
8223}
8224
8225/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
8226   We don't let nonzero_bits recur into num_sign_bit_copies, because that
8227   is less useful.  We can't allow both, because that results in exponential
8228   run time recursion.  There is a nullstone testcase that triggered
8229   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
8230#define cached_num_sign_bit_copies()
8231
8232/* Given an expression, X, compute which bits in X can be nonzero.
8233   We don't care about bits outside of those defined in MODE.
8234
8235   For most X this is simply GET_MODE_MASK (MODE), but if X is
8236   a shift, AND, or zero_extract, we can do better.  */
8237
8238static unsigned HOST_WIDE_INT
8239nonzero_bits1 (x, mode, known_x, known_mode, known_ret)
8240     rtx x;
8241     enum machine_mode mode;
8242     rtx known_x;
8243     enum machine_mode known_mode;
8244     unsigned HOST_WIDE_INT known_ret;
8245{
8246  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
8247  unsigned HOST_WIDE_INT inner_nz;
8248  enum rtx_code code;
8249  unsigned int mode_width = GET_MODE_BITSIZE (mode);
8250  rtx tem;
8251
8252  /* For floating-point values, assume all bits are needed.  */
8253  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
8254    return nonzero;
8255
8256  /* If X is wider than MODE, use its mode instead.  */
8257  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
8258    {
8259      mode = GET_MODE (x);
8260      nonzero = GET_MODE_MASK (mode);
8261      mode_width = GET_MODE_BITSIZE (mode);
8262    }
8263
8264  if (mode_width > HOST_BITS_PER_WIDE_INT)
8265    /* Our only callers in this case look for single bit values.  So
8266       just return the mode mask.  Those tests will then be false.  */
8267    return nonzero;
8268
8269#ifndef WORD_REGISTER_OPERATIONS
8270  /* If MODE is wider than X, but both are a single word for both the host
8271     and target machines, we can compute this from which bits of the
8272     object might be nonzero in its own mode, taking into account the fact
8273     that on many CISC machines, accessing an object in a wider mode
8274     causes the high-order bits to become undefined.  So they are
8275     not known to be zero.  */
8276
8277  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
8278      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
8279      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
8280      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
8281    {
8282      nonzero &= nonzero_bits_with_known (x, GET_MODE (x));
8283      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8284      return nonzero;
8285    }
8286#endif
8287
8288  code = GET_CODE (x);
8289  switch (code)
8290    {
8291    case REG:
8292#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8293      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8294	 all the bits above ptr_mode are known to be zero.  */
8295      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8296	  && REG_POINTER (x))
8297	nonzero &= GET_MODE_MASK (ptr_mode);
8298#endif
8299
8300      /* Include declared information about alignment of pointers.  */
8301      /* ??? We don't properly preserve REG_POINTER changes across
8302	 pointer-to-integer casts, so we can't trust it except for
8303	 things that we know must be pointers.  See execute/960116-1.c.  */
8304      if ((x == stack_pointer_rtx
8305	   || x == frame_pointer_rtx
8306	   || x == arg_pointer_rtx)
8307	  && REGNO_POINTER_ALIGN (REGNO (x)))
8308	{
8309	  unsigned HOST_WIDE_INT alignment
8310	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8311
8312#ifdef PUSH_ROUNDING
8313	  /* If PUSH_ROUNDING is defined, it is possible for the
8314	     stack to be momentarily aligned only to that amount,
8315	     so we pick the least alignment.  */
8316	  if (x == stack_pointer_rtx && PUSH_ARGS)
8317	    alignment = MIN (PUSH_ROUNDING (1), alignment);
8318#endif
8319
8320	  nonzero &= ~(alignment - 1);
8321	}
8322
8323      /* If X is a register whose nonzero bits value is current, use it.
8324	 Otherwise, if X is a register whose value we can find, use that
8325	 value.  Otherwise, use the previously-computed global nonzero bits
8326	 for this register.  */
8327
8328      if (reg_last_set_value[REGNO (x)] != 0
8329	  && (reg_last_set_mode[REGNO (x)] == mode
8330	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8331		  && GET_MODE_CLASS (mode) == MODE_INT))
8332	  && (reg_last_set_label[REGNO (x)] == label_tick
8333	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8334		  && REG_N_SETS (REGNO (x)) == 1
8335		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8336					REGNO (x))))
8337	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8338	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8339
8340      tem = get_last_value (x);
8341
8342      if (tem)
8343	{
8344#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8345	  /* If X is narrower than MODE and TEM is a non-negative
8346	     constant that would appear negative in the mode of X,
8347	     sign-extend it for use in reg_nonzero_bits because some
8348	     machines (maybe most) will actually do the sign-extension
8349	     and this is the conservative approach.
8350
8351	     ??? For 2.5, try to tighten up the MD files in this regard
8352	     instead of this kludge.  */
8353
8354	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8355	      && GET_CODE (tem) == CONST_INT
8356	      && INTVAL (tem) > 0
8357	      && 0 != (INTVAL (tem)
8358		       & ((HOST_WIDE_INT) 1
8359			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8360	    tem = GEN_INT (INTVAL (tem)
8361			   | ((HOST_WIDE_INT) (-1)
8362			      << GET_MODE_BITSIZE (GET_MODE (x))));
8363#endif
8364	  return nonzero_bits_with_known (tem, mode) & nonzero;
8365	}
8366      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8367	{
8368	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8369
8370	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8371	    /* We don't know anything about the upper bits.  */
8372	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8373	  return nonzero & mask;
8374	}
8375      else
8376	return nonzero;
8377
8378    case CONST_INT:
8379#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8380      /* If X is negative in MODE, sign-extend the value.  */
8381      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8382	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8383	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8384#endif
8385
8386      return INTVAL (x);
8387
8388    case MEM:
8389#ifdef LOAD_EXTEND_OP
8390      /* In many, if not most, RISC machines, reading a byte from memory
8391	 zeros the rest of the register.  Noticing that fact saves a lot
8392	 of extra zero-extends.  */
8393      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8394	nonzero &= GET_MODE_MASK (GET_MODE (x));
8395#endif
8396      break;
8397
8398    case EQ:  case NE:
8399    case UNEQ:  case LTGT:
8400    case GT:  case GTU:  case UNGT:
8401    case LT:  case LTU:  case UNLT:
8402    case GE:  case GEU:  case UNGE:
8403    case LE:  case LEU:  case UNLE:
8404    case UNORDERED: case ORDERED:
8405
8406      /* If this produces an integer result, we know which bits are set.
8407	 Code here used to clear bits outside the mode of X, but that is
8408	 now done above.  */
8409
8410      if (GET_MODE_CLASS (mode) == MODE_INT
8411	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8412	nonzero = STORE_FLAG_VALUE;
8413      break;
8414
8415    case NEG:
8416#if 0
8417      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8418	 and num_sign_bit_copies.  */
8419      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8420	  == GET_MODE_BITSIZE (GET_MODE (x)))
8421	nonzero = 1;
8422#endif
8423
8424      if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8425	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8426      break;
8427
8428    case ABS:
8429#if 0
8430      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8431	 and num_sign_bit_copies.  */
8432      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8433	  == GET_MODE_BITSIZE (GET_MODE (x)))
8434	nonzero = 1;
8435#endif
8436      break;
8437
8438    case TRUNCATE:
8439      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8440		  & GET_MODE_MASK (mode));
8441      break;
8442
8443    case ZERO_EXTEND:
8444      nonzero &= nonzero_bits_with_known (XEXP (x, 0), mode);
8445      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8446	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8447      break;
8448
8449    case SIGN_EXTEND:
8450      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8451	 Otherwise, show that all the bits in the outer mode but not in
8452	 the inner mode may be nonzero.  */
8453      inner_nz = nonzero_bits_with_known (XEXP (x, 0), mode);
8454      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8455	{
8456	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8457	  if (inner_nz
8458	      & (((HOST_WIDE_INT) 1
8459		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8460	    inner_nz |= (GET_MODE_MASK (mode)
8461			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8462	}
8463
8464      nonzero &= inner_nz;
8465      break;
8466
8467    case AND:
8468      nonzero &= (nonzero_bits_with_known (XEXP (x, 0), mode)
8469		  & nonzero_bits_with_known (XEXP (x, 1), mode));
8470      break;
8471
8472    case XOR:   case IOR:
8473    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8474      {
8475	unsigned HOST_WIDE_INT nonzero0 =
8476	  nonzero_bits_with_known (XEXP (x, 0), mode);
8477
8478	/* Don't call nonzero_bits for the second time if it cannot change
8479	   anything.  */
8480	if ((nonzero & nonzero0) != nonzero)
8481	  nonzero &= (nonzero0
8482		      | nonzero_bits_with_known (XEXP (x, 1), mode));
8483      }
8484      break;
8485
8486    case PLUS:  case MINUS:
8487    case MULT:
8488    case DIV:   case UDIV:
8489    case MOD:   case UMOD:
8490      /* We can apply the rules of arithmetic to compute the number of
8491	 high- and low-order zero bits of these operations.  We start by
8492	 computing the width (position of the highest-order nonzero bit)
8493	 and the number of low-order zero bits for each value.  */
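      /* Worked example (values invented for exposition): for PLUS with
	 NZ0 == 0x0c (width 4, two low zero bits) and NZ1 == 0x30
	 (width 6, four low zero bits), the sum fits in
	 MAX (4, 6) + 1 == 7 bits and keeps MIN (2, 4) == 2 low zero
	 bits, so NONZERO is reduced to 0x7c.  */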
8494      {
8495	unsigned HOST_WIDE_INT nz0 =
8496	  nonzero_bits_with_known (XEXP (x, 0), mode);
8497	unsigned HOST_WIDE_INT nz1 =
8498	  nonzero_bits_with_known (XEXP (x, 1), mode);
8499	int width0 = floor_log2 (nz0) + 1;
8500	int width1 = floor_log2 (nz1) + 1;
8501	int low0 = floor_log2 (nz0 & -nz0);
8502	int low1 = floor_log2 (nz1 & -nz1);
8503	HOST_WIDE_INT op0_maybe_minusp
8504	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8505	HOST_WIDE_INT op1_maybe_minusp
8506	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8507	unsigned int result_width = mode_width;
8508	int result_low = 0;
8509
8510	switch (code)
8511	  {
8512	  case PLUS:
8513	    result_width = MAX (width0, width1) + 1;
8514	    result_low = MIN (low0, low1);
8515	    break;
8516	  case MINUS:
8517	    result_low = MIN (low0, low1);
8518	    break;
8519	  case MULT:
8520	    result_width = width0 + width1;
8521	    result_low = low0 + low1;
8522	    break;
8523	  case DIV:
8524	    if (width1 == 0)
8525	      break;
8526	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8527	      result_width = width0;
8528	    break;
8529	  case UDIV:
8530	    if (width1 == 0)
8531	      break;
8532	    result_width = width0;
8533	    break;
8534	  case MOD:
8535	    if (width1 == 0)
8536	      break;
8537	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8538	      result_width = MIN (width0, width1);
8539	    result_low = MIN (low0, low1);
8540	    break;
8541	  case UMOD:
8542	    if (width1 == 0)
8543	      break;
8544	    result_width = MIN (width0, width1);
8545	    result_low = MIN (low0, low1);
8546	    break;
8547	  default:
8548	    abort ();
8549	  }
8550
8551	if (result_width < mode_width)
8552	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8553
8554	if (result_low > 0)
8555	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8556
8557#ifdef POINTERS_EXTEND_UNSIGNED
8558	/* If pointers extend unsigned and this is an addition or subtraction
8559	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8560	   zero.  */
8561	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8562	    && (code == PLUS || code == MINUS)
8563	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8564	  nonzero &= GET_MODE_MASK (ptr_mode);
8565#endif
8566      }
8567      break;
8568
8569    case ZERO_EXTRACT:
8570      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8571	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8572	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8573      break;
8574
8575    case SUBREG:
8576      /* If this is a SUBREG formed for a promoted variable that has
8577	 been zero-extended, we know that at least the high-order bits
8578	 are zero, though others might be too.  */
8579
8580      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
8581	nonzero = (GET_MODE_MASK (GET_MODE (x))
8582		   & nonzero_bits_with_known (SUBREG_REG (x), GET_MODE (x)));
8583
8584      /* If the inner mode is a single word for both the host and target
8585	 machines, we can compute this from which bits of the inner
8586	 object might be nonzero.  */
8587      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8588	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8589	      <= HOST_BITS_PER_WIDE_INT))
8590	{
8591	  nonzero &= nonzero_bits_with_known (SUBREG_REG (x), mode);
8592
8593#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8594	  /* If this is a typical RISC machine, we only have to worry
8595	     about the way loads are extended.  */
8596	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8597	       ? (((nonzero
8598		    & (((unsigned HOST_WIDE_INT) 1
8599			<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8600		   != 0))
8601	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8602	      || GET_CODE (SUBREG_REG (x)) != MEM)
8603#endif
8604	    {
8605	      /* On many CISC machines, accessing an object in a wider mode
8606		 causes the high-order bits to become undefined.  So they are
8607		 not known to be zero.  */
8608	      if (GET_MODE_SIZE (GET_MODE (x))
8609		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8610		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8611			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8612	    }
8613	}
8614      break;
8615
8616    case ASHIFTRT:
8617    case LSHIFTRT:
8618    case ASHIFT:
8619    case ROTATE:
8620      /* The nonzero bits are in two classes: any bits within MODE
8621	 that aren't in GET_MODE (x) are always significant.  The rest of the
8622	 nonzero bits are those that are significant in the operand of
8623	 the shift when shifted the appropriate number of bits.  This
8624	 shows that high-order bits are cleared by the right shift and
8625	 low-order bits by left shifts.  */
8626      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8627	  && INTVAL (XEXP (x, 1)) >= 0
8628	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8629	{
8630	  enum machine_mode inner_mode = GET_MODE (x);
8631	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8632	  int count = INTVAL (XEXP (x, 1));
8633	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8634	  unsigned HOST_WIDE_INT op_nonzero =
8635	    nonzero_bits_with_known (XEXP (x, 0), mode);
8636	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8637	  unsigned HOST_WIDE_INT outer = 0;
8638
8639	  if (mode_width > width)
8640	    outer = (op_nonzero & nonzero & ~mode_mask);
8641
8642	  if (code == LSHIFTRT)
8643	    inner >>= count;
8644	  else if (code == ASHIFTRT)
8645	    {
8646	      inner >>= count;
8647
8648	      /* If the sign bit may have been nonzero before the shift, we
8649		 need to mark all the places it could have been copied to
8650		 by the shift as possibly nonzero.  */
8651	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8652		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8653	    }
8654	  else if (code == ASHIFT)
8655	    inner <<= count;
8656	  else
8657	    inner = ((inner << (count % width)
8658		      | (inner >> (width - (count % width)))) & mode_mask);
8659
8660	  nonzero &= (outer | inner);
8661	}
8662      break;
8663
8664    case FFS:
8665      /* This is at most the number of bits in the mode.  */
8666      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8667      break;
8668
8669    case IF_THEN_ELSE:
8670      nonzero &= (nonzero_bits_with_known (XEXP (x, 1), mode)
8671		  | nonzero_bits_with_known (XEXP (x, 2), mode));
8672      break;
8673
8674    default:
8675      break;
8676    }
8677
8678  return nonzero;
8679}
8680
8681/* See the macro definition above.  */
8682#undef cached_num_sign_bit_copies
8683
8684#define num_sign_bit_copies_with_known(X, M) \
8685  cached_num_sign_bit_copies (X, M, known_x, known_mode, known_ret)
8686
8687/* The function cached_num_sign_bit_copies is a wrapper around
8688   num_sign_bit_copies1.  It avoids exponential behavior in
8689   num_sign_bit_copies1 when X has identical subexpressions on the
8690   first or the second level.  */
8691
8692static unsigned int
8693cached_num_sign_bit_copies (x, mode, known_x, known_mode, known_ret)
8694     rtx x;
8695     enum machine_mode mode;
8696     rtx known_x;
8697     enum machine_mode known_mode;
8698     unsigned int known_ret;
8699{
8700  if (x == known_x && mode == known_mode)
8701    return known_ret;
8702
8703  /* Try to find identical subexpressions.  If found call
8704     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
8705     the precomputed value for the subexpression as KNOWN_RET.  */
8706
8707  if (GET_RTX_CLASS (GET_CODE (x)) == '2'
8708      || GET_RTX_CLASS (GET_CODE (x)) == 'c')
8709    {
8710      rtx x0 = XEXP (x, 0);
8711      rtx x1 = XEXP (x, 1);
8712
8713      /* Check the first level.  */
8714      if (x0 == x1)
8715	return
8716	  num_sign_bit_copies1 (x, mode, x0, mode,
8717				num_sign_bit_copies_with_known (x0, mode));
8718
8719      /* Check the second level.  */
8720      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
8721	   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
8722	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
8723	return
8724	  num_sign_bit_copies1 (x, mode, x1, mode,
8725				num_sign_bit_copies_with_known (x1, mode));
8726
8727      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
8728	   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
8729	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
8730	return
8731	  num_sign_bit_copies1 (x, mode, x0, mode,
8732				num_sign_bit_copies_with_known (x0, mode));
8733    }
8734
8735  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
8736}
8737
8738/* Return the number of bits at the high-order end of X that are known to
8739   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8740   VOIDmode, X will be used in its own mode.  The returned value  will always
8741   VOIDmode, X will be used in its own mode.  The returned value will always
8742
8743static unsigned int
8744num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret)
8745     rtx x;
8746     enum machine_mode mode;
8747     rtx known_x;
8748     enum machine_mode known_mode;
8749     unsigned int known_ret;
8750{
8751  enum rtx_code code = GET_CODE (x);
8752  unsigned int bitwidth;
8753  int num0, num1, result;
8754  unsigned HOST_WIDE_INT nonzero;
8755  rtx tem;
8756
8757  /* If we weren't given a mode, use the mode of X.  If the mode is still
8758     VOIDmode, we don't know anything.  Likewise if one of the modes is
8759     floating-point.  */
8760
8761  if (mode == VOIDmode)
8762    mode = GET_MODE (x);
8763
8764  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8765    return 1;
8766
8767  bitwidth = GET_MODE_BITSIZE (mode);
8768
8769  /* For a smaller object, just ignore the high bits.  */
8770  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8771    {
8772      num0 = num_sign_bit_copies_with_known (x, GET_MODE (x));
8773      return MAX (1,
8774		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8775    }
8776
8777  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8778    {
8779#ifndef WORD_REGISTER_OPERATIONS
8780      /* If this machine does not do all register operations on the entire
8781	 register and MODE is wider than the mode of X, we can say nothing
8782	 at all about the high-order bits.  */
8783      return 1;
8784#else
8785      /* Likewise on machines that do, if the mode of the object is smaller
8786	 than a word and loads of that size don't sign extend, we can say
8787	 nothing about the high order bits.  */
8788      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8789#ifdef LOAD_EXTEND_OP
8790	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8791#endif
8792	  )
8793	return 1;
8794#endif
8795    }
8796
8797  switch (code)
8798    {
8799    case REG:
8800
8801#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8802      /* If pointers extend signed and this is a pointer in Pmode, say that
8803	 all the bits above ptr_mode are known to be sign bit copies.  */
8804      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8805	  && REG_POINTER (x))
8806	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8807#endif
8808
8809      if (reg_last_set_value[REGNO (x)] != 0
8810	  && reg_last_set_mode[REGNO (x)] == mode
8811	  && (reg_last_set_label[REGNO (x)] == label_tick
8812	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8813		  && REG_N_SETS (REGNO (x)) == 1
8814		  && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start,
8815					REGNO (x))))
8816	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8817	return reg_last_set_sign_bit_copies[REGNO (x)];
8818
8819      tem = get_last_value (x);
8820      if (tem != 0)
8821	return num_sign_bit_copies_with_known (tem, mode);
8822
8823      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8824	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8825	return reg_sign_bit_copies[REGNO (x)];
8826      break;
8827
8828    case MEM:
8829#ifdef LOAD_EXTEND_OP
8830      /* Some RISC machines sign-extend all loads smaller than a word.  */
8831      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8832	return MAX (1, ((int) bitwidth
8833			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8834#endif
8835      break;
8836
8837    case CONST_INT:
8838      /* If the constant is negative, take its 1's complement and remask.
8839	 Then see how many zero bits we have.  */
8840      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8841      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8842	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8843	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8844
8845      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
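      /* A worked instance (illustrative, assuming 32-bit SImode and a
	 HOST_WIDE_INT of at least 32 bits): for x == -4, NONZERO is
	 0xfffffffc; the sign bit is set, so we complement to get 3,
	 floor_log2 (3) == 1, and we return 32 - 1 - 1 == 30 -- the 30
	 leading 1 bits of ...11111100.  */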
8846
8847    case SUBREG:
8848      /* If this is a SUBREG for a promoted object that is sign-extended
8849	 and we are looking at it in a wider mode, we know that at least the
8850	 and we are looking at it in a wider mode, at least the high-order
8851	 bits are known to be sign bit copies.  */
8852      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8853	{
8854	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8855	  return MAX ((int) bitwidth
8856		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8857		      num0);
8858	}
8859
8860      /* For a smaller object, just ignore the high bits.  */
8861      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8862	{
8863	  num0 = num_sign_bit_copies_with_known (SUBREG_REG (x), VOIDmode);
8864	  return MAX (1, (num0
8865			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8866				   - bitwidth)));
8867	}
8868
8869#ifdef WORD_REGISTER_OPERATIONS
8870#ifdef LOAD_EXTEND_OP
8871      /* For paradoxical SUBREGs on machines where all register operations
8872	 affect the entire register, just look inside.  Note that we are
8873	 passing MODE to the recursive call, so the number of sign bit copies
8874	 will remain relative to that mode, not the inner mode.  */
8875
8876      /* This works only if loads sign extend.  Otherwise, if we get a
8877	 reload for the inner part, it may be loaded from the stack, and
8878	 then we lose all sign bit copies that existed before the store
8879	 to the stack.  */
8880
8881      if ((GET_MODE_SIZE (GET_MODE (x))
8882	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8883	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8884	  && GET_CODE (SUBREG_REG (x)) == MEM)
8885	return num_sign_bit_copies_with_known (SUBREG_REG (x), mode);
8886#endif
8887#endif
8888      break;
8889
8890    case SIGN_EXTRACT:
8891      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8892	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8893      break;
8894
8895    case SIGN_EXTEND:
8896      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8897	      + num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode));
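      /* E.g. (illustrative): for (sign_extend:SI (reg:QI R)) with 32-bit
	 SImode and 8-bit QImode this is 24 plus the copies the QImode
	 value already had, hence at least 25.  */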
8898
8899    case TRUNCATE:
8900      /* For a smaller object, just ignore the high bits.  */
8901      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), VOIDmode);
8902      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8903				    - bitwidth)));
8904
8905    case NOT:
8906      return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8907
8908    case ROTATE:       case ROTATERT:
8909      /* If we are rotating left by a number of bits less than the number
8910	 of sign bit copies, we can just subtract that amount from the
8911	 number.  */
8912      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8913	  && INTVAL (XEXP (x, 1)) >= 0
8914	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8915	{
8916	  num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8917	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8918				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8919	}
8920      break;
8921
8922    case NEG:
8923      /* In general, this subtracts one sign bit copy.  But if the value
8924	 is known to be positive, the number of sign bit copies is the
8925	 same as that of the input.  Finally, if the input has just one bit
8926	 that might be nonzero, all the bits are copies of the sign bit.  */
8927      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8928      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8929	return num0 > 1 ? num0 - 1 : 1;
8930
8931      nonzero = nonzero_bits (XEXP (x, 0), mode);
8932      if (nonzero == 1)
8933	return bitwidth;
8934
8935      if (num0 > 1
8936	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8937	num0--;
8938
8939      return num0;
8940
8941    case IOR:   case AND:   case XOR:
8942    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8943      /* Logical operations will preserve the number of sign-bit copies.
8944	 MIN and MAX operations always return one of the operands.  */
8945      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8946      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8947      return MIN (num0, num1);
8948
8949    case PLUS:  case MINUS:
8950      /* For addition and subtraction, we can have a 1-bit carry.  However,
8951	 if we are subtracting 1 from a positive number, there will not
8952	 be such a carry.  Furthermore, if the positive number is known to
8953	 be 0 or 1, we know the result is either -1 or 0.  */
8954
8955      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8956	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8957	{
8958	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8959	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8960	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8961		    : bitwidth - floor_log2 (nonzero) - 1);
8962	}
8963
8964      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8965      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8966      result = MAX (1, MIN (num0, num1) - 1);
8967
8968#ifdef POINTERS_EXTEND_UNSIGNED
8969      /* If pointers extend signed and this is an addition or subtraction
8970	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8971	 sign bit copies.  */
8972      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8973	  && (code == PLUS || code == MINUS)
8974	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8975	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8976			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8977		      result);
8978#endif
8979      return result;
8980
8981    case MULT:
8982      /* The number of bits of the product is the sum of the number of
8983	 bits of both terms.  However, unless one of the terms is known
8984	 to be positive, we must allow for an additional bit since negating
8985	 a negative number can remove one sign bit copy.  */
8986
8987      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
8988      num1 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
8989
8990      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8991      if (result > 0
8992	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8993	      || (((nonzero_bits (XEXP (x, 0), mode)
8994		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8995		  && ((nonzero_bits (XEXP (x, 1), mode)
8996		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8997	result--;
8998
8999      return MAX (1, result);
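      /* E.g. (illustrative, 32-bit SImode): if each operand has 20 sign
	 bit copies, RESULT is 32 - 12 - 12 == 8; if both operands might
	 be negative we drop one more for the possible negation, giving 7.  */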
9000
9001    case UDIV:
9002      /* The result must be <= the first operand.  If the first operand
9003         has the high bit set, we know nothing about the number of sign
9004         bit copies.  */
9005      if (bitwidth > HOST_BITS_PER_WIDE_INT)
9006	return 1;
9007      else if ((nonzero_bits (XEXP (x, 0), mode)
9008		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
9009	return 1;
9010      else
9011	return num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9012
9013    case UMOD:
9014      /* The result must be <= the second operand.  */
9015      return num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9016
9017    case DIV:
9018      /* Similar to unsigned division, except that we have to worry about
9019	 the case where the divisor is negative, in which case the quotient
9020	 may be negated and we must count one fewer sign bit copy.  */
9021      result = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9022      if (result > 1
9023	  && (bitwidth > HOST_BITS_PER_WIDE_INT
9024	      || (nonzero_bits (XEXP (x, 1), mode)
9025		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
9026	result--;
9027
9028      return result;
9029
9030    case MOD:
9031      result = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9032      if (result > 1
9033	  && (bitwidth > HOST_BITS_PER_WIDE_INT
9034	      || (nonzero_bits (XEXP (x, 1), mode)
9035		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
9036	result--;
9037
9038      return result;
9039
9040    case ASHIFTRT:
9041      /* An arithmetic right shift by a constant adds that many bits
9042	 equal to the sign bit.  */
9043      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9044      if (GET_CODE (XEXP (x, 1)) == CONST_INT
9045	  && INTVAL (XEXP (x, 1)) > 0)
9046	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
9047
9048      return num0;
9049
9050    case ASHIFT:
9051      /* Left shifts destroy copies.  */
9052      if (GET_CODE (XEXP (x, 1)) != CONST_INT
9053	  || INTVAL (XEXP (x, 1)) < 0
9054	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
9055	return 1;
9056
9057      num0 = num_sign_bit_copies_with_known (XEXP (x, 0), mode);
9058      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
9059
9060    case IF_THEN_ELSE:
9061      num0 = num_sign_bit_copies_with_known (XEXP (x, 1), mode);
9062      num1 = num_sign_bit_copies_with_known (XEXP (x, 2), mode);
9063      return MIN (num0, num1);
9064
9065    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
9066    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
9067    case GEU: case GTU: case LEU: case LTU:
9068    case UNORDERED: case ORDERED:
9069      /* If STORE_FLAG_VALUE is negative, take its 1's complement and
9070	 remask.  Then see how many zero bits we have.  */
9071      nonzero = STORE_FLAG_VALUE;
9072      if (bitwidth <= HOST_BITS_PER_WIDE_INT
9073	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
9074	nonzero = (~nonzero) & GET_MODE_MASK (mode);
9075
9076      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
9077      break;
9078
9079    default:
9080      break;
9081    }
9082
9083  /* If we haven't been able to figure it out by one of the above rules,
9084     see if some of the high-order bits are known to be zero.  If so,
9085     count those bits and return one less than that amount.  If we can't
9086     safely compute the mask for this mode, we just return 1.  */
9087
9088  if (bitwidth > HOST_BITS_PER_WIDE_INT)
9089    return 1;
9090
9091  nonzero = nonzero_bits (x, mode);
9092  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
9093	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
9094}
9095
9096/* Return the number of "extended" bits there are in X, when interpreted
9097   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
9098   unsigned quantities, this is the number of high-order zero bits.
9099   For signed quantities, this is the number of copies of the sign bit
9100   minus 1.  In both cases, this function returns the number of "spare"
9101   bits.  For example, if two quantities for which this function returns
9102   at least 1 are added, the addition is known not to overflow.
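   (Illustrative instance: two SImode values each with at least one
   high-order zero bit are both less than 2^31, so their sum fits in
   32 bits.)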
9103
9104   This function will always return 0 unless called during combine, which
9105   implies that it must be called from a define_split.  */
9106
9107unsigned int
9108extended_count (x, mode, unsignedp)
9109     rtx x;
9110     enum machine_mode mode;
9111     int unsignedp;
9112{
9113  if (nonzero_sign_valid == 0)
9114    return 0;
9115
9116  return (unsignedp
9117	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9118	     ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9119			       - floor_log2 (nonzero_bits (x, mode)))
9120	     : 0)
9121	  : num_sign_bit_copies (x, mode) - 1);
9122}
9123
9124/* This function is called from `simplify_shift_const' to merge two
9125   outer operations.  Specifically, we have already found that we need
9126   to perform operation *POP0 with constant *PCONST0 at the outermost
9127   position.  We would now like to also perform OP1 with constant CONST1
9128   (with *POP0 being done last).
9129
9130   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
9131   the resulting operation.  *PCOMP_P is set to 1 if we would need to
9132   complement the innermost operand, otherwise it is unchanged.
9133
9134   MODE is the mode in which the operation will be done.  No bits outside
9135   the width of this mode matter.  It is assumed that the width of this mode
9136   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
9137
9138   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
9139   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
9140   result is simply *PCONST0.
9141
9142   If the resulting operation cannot be expressed as one operation, we
9143   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
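/* Example (illustrative): if *POP0 is XOR and OP1 is AND with the same
   constant C, then (a & C) ^ C == (~a) & C, so we return with *POP0 ==
   AND and *PCOMP_P set; if both operations are AND, the constants are
   simply combined as *PCONST0 &= CONST1.  */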
9144
9145static int
9146merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
9147     enum rtx_code *pop0;
9148     HOST_WIDE_INT *pconst0;
9149     enum rtx_code op1;
9150     HOST_WIDE_INT const1;
9151     enum machine_mode mode;
9152     int *pcomp_p;
9153{
9154  enum rtx_code op0 = *pop0;
9155  HOST_WIDE_INT const0 = *pconst0;
9156
9157  const0 &= GET_MODE_MASK (mode);
9158  const1 &= GET_MODE_MASK (mode);
9159
9160  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9161  if (op0 == AND)
9162    const1 &= const0;
9163
9164  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
9165     if OP0 is SET.  */
9166
9167  if (op1 == NIL || op0 == SET)
9168    return 1;
9169
9170  else if (op0 == NIL)
9171    op0 = op1, const0 = const1;
9172
9173  else if (op0 == op1)
9174    {
9175      switch (op0)
9176	{
9177	case AND:
9178	  const0 &= const1;
9179	  break;
9180	case IOR:
9181	  const0 |= const1;
9182	  break;
9183	case XOR:
9184	  const0 ^= const1;
9185	  break;
9186	case PLUS:
9187	  const0 += const1;
9188	  break;
9189	case NEG:
9190	  op0 = NIL;
9191	  break;
9192	default:
9193	  break;
9194	}
9195    }
9196
9197  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9198  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9199    return 0;
9200
9201  /* If the two constants aren't the same, we can't do anything.  The
9202     remaining six cases can all be done.  */
9203  else if (const0 != const1)
9204    return 0;
9205
9206  else
9207    switch (op0)
9208      {
9209      case IOR:
9210	if (op1 == AND)
9211	  /* (a & b) | b == b */
9212	  op0 = SET;
9213	else /* op1 == XOR */
9214	  /* (a ^ b) | b == a | b */
9215	  {;}
9216	break;
9217
9218      case XOR:
9219	if (op1 == AND)
9220	  /* (a & b) ^ b == (~a) & b */
9221	  op0 = AND, *pcomp_p = 1;
9222	else /* op1 == IOR */
9223	  /* (a | b) ^ b == a & ~b */
9224	  op0 = AND, *pconst0 = ~const0;
9225	break;
9226
9227      case AND:
9228	if (op1 == IOR)
9229	  /* (a | b) & b == b */
9230	  op0 = SET;
9231	else /* op1 == XOR */
9232	  /* (a ^ b) & b == (~a) & b */
9233	  *pcomp_p = 1;
9234	break;
9235      default:
9236	break;
9237      }
9238
9239  /* Check for NO-OP cases.  */
9240  const0 &= GET_MODE_MASK (mode);
9241  if (const0 == 0
9242      && (op0 == IOR || op0 == XOR || op0 == PLUS))
9243    op0 = NIL;
9244  else if (const0 == 0 && op0 == AND)
9245    op0 = SET;
9246  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9247	   && op0 == AND)
9248    op0 = NIL;
9249
9250  /* ??? Slightly redundant with the above mask, but not entirely.
9251     Moving this above means we'd have to sign-extend the mode mask
9252     for the final test.  */
9253  const0 = trunc_int_for_mode (const0, mode);
9254
9255  *pop0 = op0;
9256  *pconst0 = const0;
9257
9258  return 1;
9259}
9260
9261/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9262   The result of the shift is RESULT_MODE.  X, if nonzero, is an expression
9263   that we started with.
9264
9265   The shift is normally computed in the widest mode we find in VAROP, as
9266   long as it isn't a different number of words than RESULT_MODE.  Exceptions
9267   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
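/* A representative simplification (illustrative): for a 32-bit SImode,
   shifting (ashift:SI X 24) right by 24 with LSHIFTRT cancels the two
   counts and records an outer AND of 0xff, so the result becomes
   (and:SI X (const_int 255)); see the nested-shift handling below.  */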
9268
9269static rtx
9270simplify_shift_const (x, code, result_mode, varop, orig_count)
9271     rtx x;
9272     enum rtx_code code;
9273     enum machine_mode result_mode;
9274     rtx varop;
9275     int orig_count;
9276{
9277  enum rtx_code orig_code = code;
9278  unsigned int count;
9279  int signed_count;
9280  enum machine_mode mode = result_mode;
9281  enum machine_mode shift_mode, tmode;
9282  unsigned int mode_words
9283    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9284  /* We form (outer_op (code varop count) (outer_const)).  */
9285  enum rtx_code outer_op = NIL;
9286  HOST_WIDE_INT outer_const = 0;
9287  rtx const_rtx;
9288  int complement_p = 0;
9289  rtx new;
9290
9291  /* Make sure and truncate the "natural" shift on the way in.  We don't
9292  /* Make sure to truncate the "natural" shift count on the way in.  We don't
9293     combine shifts.  */
9294#ifdef SHIFT_COUNT_TRUNCATED
9295  if (SHIFT_COUNT_TRUNCATED)
9296    orig_count &= GET_MODE_BITSIZE (mode) - 1;
9297#endif
9298
9299  /* If we were given an invalid count, don't do anything except exactly
9300     what was requested.  */
9301
9302  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9303    {
9304      if (x)
9305	return x;
9306
9307      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
9308    }
9309
9310  count = orig_count;
9311
9312  /* Unless one of the branches of the `if' in this loop does a `continue',
9313     we will `break' the loop after the `if'.  */
9314
9315  while (count != 0)
9316    {
9317      /* If we have an operand of (clobber (const_int 0)), just return that
9318	 value.  */
9319      if (GET_CODE (varop) == CLOBBER)
9320	return varop;
9321
9322      /* If we discovered we had to complement VAROP, leave.  Making a NOT
9323	 here would cause an infinite loop.  */
9324      if (complement_p)
9325	break;
9326
9327      /* Convert ROTATERT to ROTATE.  */
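      /* E.g. (illustrative): in 32-bit SImode, (rotatert X 8) becomes
	 (rotate X 24); for a vector mode the count is taken per element.  */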
9328      if (code == ROTATERT)
9329	{
9330	  unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9331	  code = ROTATE;
9332	  if (VECTOR_MODE_P (result_mode))
9333	    count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9334	  else
9335	    count = bitsize - count;
9336	}
9337
9338      /* We need to determine what mode we will do the shift in.  If the
9339	 shift is a right shift or a ROTATE, we must always do it in the mode
9340	 it was originally done in.  Otherwise, we can do it in MODE, the
9341	 widest mode encountered.  */
9342      shift_mode
9343	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9344	   ? result_mode : mode);
9345
9346      /* Handle cases where the count is greater than the size of the mode
9347	 minus 1.  For ASHIFT, use the size minus one as the count (this can
9348	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9349	 take the count modulo the size.  For other shifts, the result is
9350	 zero.
9351
9352	 Since these shifts are being produced by the compiler by combining
9353	 multiple operations, each of which is defined, we know what the
9354	 result is supposed to be.  */
9355
9356      if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
9357	{
9358	  if (code == ASHIFTRT)
9359	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9360	  else if (code == ROTATE || code == ROTATERT)
9361	    count %= GET_MODE_BITSIZE (shift_mode);
9362	  else
9363	    {
9364	      /* We can't simply return zero because there may be an
9365		 outer op.  */
9366	      varop = const0_rtx;
9367	      count = 0;
9368	      break;
9369	    }
9370	}
9371
9372      /* An arithmetic right shift of a quantity known to be -1 or 0
9373	 is a no-op.  */
9374      if (code == ASHIFTRT
9375	  && (num_sign_bit_copies (varop, shift_mode)
9376	      == GET_MODE_BITSIZE (shift_mode)))
9377	{
9378	  count = 0;
9379	  break;
9380	}
9381
9382      /* If we are doing an arithmetic right shift and discarding all but
9383	 the sign bit copies, this is equivalent to doing a shift by the
9384	 bitsize minus one.  Convert it into that shift because it will often
9385	 allow other simplifications.  */
9386
9387      if (code == ASHIFTRT
9388	  && (count + num_sign_bit_copies (varop, shift_mode)
9389	      >= GET_MODE_BITSIZE (shift_mode)))
9390	count = GET_MODE_BITSIZE (shift_mode) - 1;
9391
9392      /* We simplify the tests below and elsewhere by converting
9393	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9394	 `make_compound_operation' will convert it to an ASHIFTRT for
9395	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9396      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9397	  && code == ASHIFTRT
9398	  && ((nonzero_bits (varop, shift_mode)
9399	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9400	      == 0))
9401	code = LSHIFTRT;
9402
9403      switch (GET_CODE (varop))
9404	{
9405	case SIGN_EXTEND:
9406	case ZERO_EXTEND:
9407	case SIGN_EXTRACT:
9408	case ZERO_EXTRACT:
9409	  new = expand_compound_operation (varop);
9410	  if (new != varop)
9411	    {
9412	      varop = new;
9413	      continue;
9414	    }
9415	  break;
9416
9417	case MEM:
9418	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9419	     minus the width of a smaller mode, we can do this with a
9420	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9421	  if ((code == ASHIFTRT || code == LSHIFTRT)
9422	      && ! mode_dependent_address_p (XEXP (varop, 0))
9423	      && ! MEM_VOLATILE_P (varop)
9424	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9425					 MODE_INT, 1)) != BLKmode)
9426	    {
9427	      new = adjust_address_nv (varop, tmode,
9428				       BYTES_BIG_ENDIAN ? 0
9429				       : count / BITS_PER_UNIT);
9430
9431	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9432				     : ZERO_EXTEND, mode, new);
9433	      count = 0;
9434	      continue;
9435	    }
9436	  break;
9437
9438	case USE:
9439	  /* Similar to the case above, except that we can only do this if
9440	     the resulting mode is the same as that of the underlying
9441	     MEM; we must adjust the address depending on the *bits*
9442	     endianness because of the way bit-field extract insns are defined.  */
9443	  if ((code == ASHIFTRT || code == LSHIFTRT)
9444	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9445					 MODE_INT, 1)) != BLKmode
9446	      && tmode == GET_MODE (XEXP (varop, 0)))
9447	    {
9448	      if (BITS_BIG_ENDIAN)
9449		new = XEXP (varop, 0);
9450	      else
9451		{
9452		  new = copy_rtx (XEXP (varop, 0));
9453		  SUBST (XEXP (new, 0),
9454			 plus_constant (XEXP (new, 0),
9455					count / BITS_PER_UNIT));
9456		}
9457
9458	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9459				     : ZERO_EXTEND, mode, new);
9460	      count = 0;
9461	      continue;
9462	    }
9463	  break;
9464
9465	case SUBREG:
9466	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9467	     the same number of words as what we've seen so far.  Then store
9468	     the widest mode in MODE.  */
9469	  if (subreg_lowpart_p (varop)
9470	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9471		  > GET_MODE_SIZE (GET_MODE (varop)))
9472	      && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9473				  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9474		 == mode_words)
9475	    {
9476	      varop = SUBREG_REG (varop);
9477	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9478		mode = GET_MODE (varop);
9479	      continue;
9480	    }
9481	  break;
9482
9483	case MULT:
9484	  /* Some machines use MULT instead of ASHIFT because MULT
9485	     is cheaper.  But it is still better on those machines to
9486	     merge two shifts into one.  */
9487	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9488	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9489	    {
9490	      varop
9491		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9492			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9493	      continue;
9494	    }
9495	  break;
9496
9497	case UDIV:
9498	  /* Similar, for when divides are cheaper.  */
9499	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9500	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9501	    {
9502	      varop
9503		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9504			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9505	      continue;
9506	    }
9507	  break;
9508
9509	case ASHIFTRT:
9510	  /* If we are extracting just the sign bit of an arithmetic
9511	     right shift, that shift is not needed.  However, the sign
9512	     bit of a wider mode may be different from what would be
9513	     interpreted as the sign bit in a narrower mode, so, if
9514	     the result is narrower, don't discard the shift.  */
9515	  if (code == LSHIFTRT
9516	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9517	      && (GET_MODE_BITSIZE (result_mode)
9518		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9519	    {
9520	      varop = XEXP (varop, 0);
9521	      continue;
9522	    }
9523
9524	  /* ... fall through ...  */
9525
9526	case LSHIFTRT:
9527	case ASHIFT:
9528	case ROTATE:
9529	  /* Here we have two nested shifts.  The result is usually the
9530	     AND of a new shift with a mask.  We compute the result below.  */
9531	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9532	      && INTVAL (XEXP (varop, 1)) >= 0
9533	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9534	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9535	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9536	    {
9537	      enum rtx_code first_code = GET_CODE (varop);
9538	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9539	      unsigned HOST_WIDE_INT mask;
9540	      rtx mask_rtx;
9541
9542	      /* We have one common special case.  We can't do any merging if
9543		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9544		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9545		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9546		 we can convert it to
9547		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9548		 This simplifies certain SIGN_EXTEND operations.  */
9549	      if (code == ASHIFT && first_code == ASHIFTRT
9550		  && count == (unsigned int)
9551			      (GET_MODE_BITSIZE (result_mode)
9552			       - GET_MODE_BITSIZE (GET_MODE (varop))))
9553		{
9554		  /* C3 has the low-order C1 bits zero.  */
9555
9556		  mask = (GET_MODE_MASK (mode)
9557			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9558
9559		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9560						  XEXP (varop, 0), mask);
9561		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9562						varop, count);
9563		  count = first_count;
9564		  code = ASHIFTRT;
9565		  continue;
9566		}
9567
9568	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9569		 than C1 high-order bits equal to the sign bit, we can convert
9570		 this to either an ASHIFT or an ASHIFTRT depending on the
9571		 two counts.
9572
9573		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9574
9575	      if (code == ASHIFTRT && first_code == ASHIFT
9576		  && GET_MODE (varop) == shift_mode
9577		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9578		      > first_count))
9579		{
9580		  varop = XEXP (varop, 0);
9581
9582		  signed_count = count - first_count;
9583		  if (signed_count < 0)
9584		    count = -signed_count, code = ASHIFT;
9585		  else
9586		    count = signed_count;
9587
9588		  continue;
9589		}
9590
9591	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9592		 we can only do this if FIRST_CODE is also ASHIFTRT.
9593
9594		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9595		 ASHIFTRT.
9596
9597		 If the mode of this shift is not the mode of the outer shift,
9598		 we can't do this if either shift is a right shift or ROTATE.
9599
9600		 Finally, we can't do any of these if the mode is too wide
9601		 unless the codes are the same.
9602
9603		 Handle the case where the shift codes are the same
9604		 first.  */
9605
9606	      if (code == first_code)
9607		{
9608		  if (GET_MODE (varop) != result_mode
9609		      && (code == ASHIFTRT || code == LSHIFTRT
9610			  || code == ROTATE))
9611		    break;
9612
9613		  count += first_count;
9614		  varop = XEXP (varop, 0);
9615		  continue;
9616		}
9617
9618	      if (code == ASHIFTRT
9619		  || (code == ROTATE && first_code == ASHIFTRT)
9620		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9621		  || (GET_MODE (varop) != result_mode
9622		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9623			  || first_code == ROTATE
9624			  || code == ROTATE)))
9625		break;
9626
9627	      /* To compute the mask to apply after the shift, shift the
9628		 nonzero bits of the inner shift the same way the
9629		 outer shift will.  */
9630
9631	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9632
9633	      mask_rtx
9634		= simplify_binary_operation (code, result_mode, mask_rtx,
9635					     GEN_INT (count));
9636
9637	      /* Give up if we can't compute an outer operation to use.  */
9638	      if (mask_rtx == 0
9639		  || GET_CODE (mask_rtx) != CONST_INT
9640		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9641					INTVAL (mask_rtx),
9642					result_mode, &complement_p))
9643		break;
9644
9645	      /* If the shifts are in the same direction, we add the
9646		 counts.  Otherwise, we subtract them.  */
9647	      signed_count = count;
9648	      if ((code == ASHIFTRT || code == LSHIFTRT)
9649		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9650		signed_count += first_count;
9651	      else
9652		signed_count -= first_count;
9653
9654	      /* If COUNT is positive, the new shift is usually CODE,
9655		 except for the two exceptions below, in which case it is
9656		 FIRST_CODE.  If the count is negative, FIRST_CODE should
9657		 always be used.  */
9658	      if (signed_count > 0
9659		  && ((first_code == ROTATE && code == ASHIFT)
9660		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9661		code = first_code, count = signed_count;
9662	      else if (signed_count < 0)
9663		code = first_code, count = -signed_count;
9664	      else
9665		count = signed_count;
9666
9667	      varop = XEXP (varop, 0);
9668	      continue;
9669	    }
9670
9671	  /* If we have (A << B << C) for any shift, we can convert this to
9672	     (A << C << B).  This wins if A is a constant.  Only try this if
9673	     B is not a constant.  */
9674
9675	  else if (GET_CODE (varop) == code
9676		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9677		   && 0 != (new
9678			    = simplify_binary_operation (code, mode,
9679							 XEXP (varop, 0),
9680							 GEN_INT (count))))
9681	    {
9682	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9683	      count = 0;
9684	      continue;
9685	    }
9686	  break;
9687
9688	case NOT:
9689	  /* Make this fit the case below.  */
9690	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9691			       GEN_INT (GET_MODE_MASK (mode)));
9692	  continue;
9693
9694	case IOR:
9695	case AND:
9696	case XOR:
9697	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9698	     with C the size of VAROP - 1 and the shift is logical if
9699	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9700	     we have an (le X 0) operation.   If we have an arithmetic shift
9701	     we have an (le X 0) operation.  If we have an arithmetic shift
9702	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9703
9704	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9705	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9706	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9707	      && (code == LSHIFTRT || code == ASHIFTRT)
9708	      && count == (unsigned int)
9709			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9710	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9711	    {
9712	      count = 0;
9713	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9714				  const0_rtx);
9715
9716	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9717		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9718
9719	      continue;
9720	    }
9721
9722	  /* If we have (shift (logical)), move the logical to the outside
9723	     to allow it to possibly combine with another logical and the
9724	     shift to combine with another shift.  This also canonicalizes to
9725	     what a ZERO_EXTRACT looks like.  Also, some machines have
9726	     (and (shift)) insns.  */
9727
9728	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9729	      && (new = simplify_binary_operation (code, result_mode,
9730						   XEXP (varop, 1),
9731						   GEN_INT (count))) != 0
9732	      && GET_CODE (new) == CONST_INT
9733	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9734				  INTVAL (new), result_mode, &complement_p))
9735	    {
9736	      varop = XEXP (varop, 0);
9737	      continue;
9738	    }
9739
9740	  /* If we can't do that, try to simplify the shift in each arm of the
9741	     logical expression, make a new logical expression, and apply
9742	     the inverse distributive law.  */
9743	  {
9744	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9745					    XEXP (varop, 0), count);
9746	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9747					    XEXP (varop, 1), count);
9748
9749	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9750	    varop = apply_distributive_law (varop);
9751
9752	    count = 0;
9753	  }
9754	  break;
9755
9756	case EQ:
9757	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9758	     says that the sign bit can be tested, FOO has mode MODE, C is
9759	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9760	     that may be nonzero.  */
9761	  if (code == LSHIFTRT
9762	      && XEXP (varop, 1) == const0_rtx
9763	      && GET_MODE (XEXP (varop, 0)) == result_mode
9764	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9765	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9766	      && ((STORE_FLAG_VALUE
9767		   & ((HOST_WIDE_INT) 1
9768		      << (GET_MODE_BITSIZE (result_mode) - 1))))
9769	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9770	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9771				  (HOST_WIDE_INT) 1, result_mode,
9772				  &complement_p))
9773	    {
9774	      varop = XEXP (varop, 0);
9775	      count = 0;
9776	      continue;
9777	    }
9778	  break;
9779
9780	case NEG:
9781	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9782	     than the number of bits in the mode is equivalent to A.  */
9783	  if (code == LSHIFTRT
9784	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9785	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9786	    {
9787	      varop = XEXP (varop, 0);
9788	      count = 0;
9789	      continue;
9790	    }
9791
9792	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9793	     NEG outside to allow shifts to combine.  */
9794	  if (code == ASHIFT
9795	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9796				  (HOST_WIDE_INT) 0, result_mode,
9797				  &complement_p))
9798	    {
9799	      varop = XEXP (varop, 0);
9800	      continue;
9801	    }
9802	  break;
9803
9804	case PLUS:
9805	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9806	     is one less than the number of bits in the mode is
9807	     equivalent to (xor A 1).  */
9808	  if (code == LSHIFTRT
9809	      && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
9810	      && XEXP (varop, 1) == constm1_rtx
9811	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9812	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9813				  (HOST_WIDE_INT) 1, result_mode,
9814				  &complement_p))
9815	    {
9816	      count = 0;
9817	      varop = XEXP (varop, 0);
9818	      continue;
9819	    }
9820
9821	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9822	     that might be nonzero in BAR are those being shifted out and those
9823	     bits are known zero in FOO, we can replace the PLUS with FOO.
9824	     Similarly in the other operand order.  This code occurs when
9825	     we are computing the size of a variable-size array.  */
9826
9827	  if ((code == ASHIFTRT || code == LSHIFTRT)
9828	      && count < HOST_BITS_PER_WIDE_INT
9829	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9830	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9831		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9832	    {
9833	      varop = XEXP (varop, 0);
9834	      continue;
9835	    }
9836	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9837		   && count < HOST_BITS_PER_WIDE_INT
9838		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9839		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9840			    >> count)
9841		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9842			    & nonzero_bits (XEXP (varop, 1),
9843						 result_mode)))
9844	    {
9845	      varop = XEXP (varop, 1);
9846	      continue;
9847	    }
9848
9849	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9850	  if (code == ASHIFT
9851	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9852	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9853						   XEXP (varop, 1),
9854						   GEN_INT (count))) != 0
9855	      && GET_CODE (new) == CONST_INT
9856	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9857				  INTVAL (new), result_mode, &complement_p))
9858	    {
9859	      varop = XEXP (varop, 0);
9860	      continue;
9861	    }
9862	  break;
9863
9864	case MINUS:
9865	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9866	     with C the size of VAROP - 1 and the shift is logical if
9867	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9868	     we have a (gt X 0) operation.  If the shift is arithmetic with
9869	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9870	     we have a (neg (gt X 0)) operation.  */
9871
9872	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9873	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9874	      && count == (unsigned int)
9875			  (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9876	      && (code == LSHIFTRT || code == ASHIFTRT)
9877	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9878	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
9879		 == count
9880	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9881	    {
9882	      count = 0;
9883	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9884				  const0_rtx);
9885
9886	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9887		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9888
9889	      continue;
9890	    }
9891	  break;
9892
9893	case TRUNCATE:
9894	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9895	     if the truncate does not affect the value.  */
9896	  if (code == LSHIFTRT
9897	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9898	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9899	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9900		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9901		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9902	    {
9903	      rtx varop_inner = XEXP (varop, 0);
9904
9905	      varop_inner
9906		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9907				    XEXP (varop_inner, 0),
9908				    GEN_INT
9909				    (count + INTVAL (XEXP (varop_inner, 1))));
9910	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9911	      count = 0;
9912	      continue;
9913	    }
9914	  break;
9915
9916	default:
9917	  break;
9918	}
9919
9920      break;
9921    }
9922
9923  /* We need to determine what mode to do the shift in.  If the shift is
9924     a right shift or ROTATE, we must always do it in the mode it was
9925     originally done in.  Otherwise, we can do it in MODE, the widest mode
9926     encountered.  The code we care about is that of the shift that will
9927     actually be done, not the shift that was originally requested.  */
9928  shift_mode
9929    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9930       ? result_mode : mode);
9931
9932  /* We have now finished analyzing the shift.  The result should be
9933     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9934     OUTER_OP is non-NIL, it is an operation that needs to be applied
9935     to the result of the shift.  OUTER_CONST is the relevant constant,
9936     but we must turn off all bits turned off in the shift.
9937
9938     If we were passed a value for X, see if we can use any pieces of
9939     it.  If not, make new rtx.  */
9940
9941  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9942      && GET_CODE (XEXP (x, 1)) == CONST_INT
9943      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
9944    const_rtx = XEXP (x, 1);
9945  else
9946    const_rtx = GEN_INT (count);
9947
9948  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9949      && GET_MODE (XEXP (x, 0)) == shift_mode
9950      && SUBREG_REG (XEXP (x, 0)) == varop)
9951    varop = XEXP (x, 0);
9952  else if (GET_MODE (varop) != shift_mode)
9953    varop = gen_lowpart_for_combine (shift_mode, varop);
9954
9955  /* If we can't make the SUBREG, try to return what we were given.  */
9956  if (GET_CODE (varop) == CLOBBER)
9957    return x ? x : varop;
9958
9959  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9960  if (new != 0)
9961    x = new;
9962  else
9963    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9964
9965  /* If we have an outer operation and we just made a shift, it is
9966     possible that we could have simplified the shift were it not
9967     for the outer operation.  So try to do the simplification
9968     recursively.  */
9969
9970  if (outer_op != NIL && GET_CODE (x) == code
9971      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9972    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9973			      INTVAL (XEXP (x, 1)));
9974
9975  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
9976     turn off all the bits that the shift would have turned off.  */
9977  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9978    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9979				GET_MODE_MASK (result_mode) >> orig_count);
9980
9981  /* Do the remainder of the processing in RESULT_MODE.  */
9982  x = gen_lowpart_for_combine (result_mode, x);
9983
9984  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9985     operation.  */
9986  if (complement_p)
9987    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9988
9989  if (outer_op != NIL)
9990    {
9991      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9992	outer_const = trunc_int_for_mode (outer_const, result_mode);
9993
9994      if (outer_op == AND)
9995	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9996      else if (outer_op == SET)
9997	/* This means that we have determined that the result is
9998	   equivalent to a constant.  This should be rare.  */
9999	x = GEN_INT (outer_const);
10000      else if (GET_RTX_CLASS (outer_op) == '1')
10001	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
10002      else
10003	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
10004    }
10005
10006  return x;
10007}
10008
10009/* Like recog, but we receive the address of a pointer to a new pattern.
10010   We try to match the rtx that the pointer points to.
10011   If that fails, we may try to modify or replace the pattern,
10012   storing the replacement into the same pointer object.
10013
10014   Modifications include deletion or addition of CLOBBERs.
10015
10016   PNOTES is a pointer to a location where any REG_UNUSED notes added for
10017   the CLOBBERs are placed.
10018
10019   The value is the final insn code from the pattern ultimately matched,
10020   or -1.  */
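/* For instance (illustrative): if the combined pattern can match a
   define_insn only with an extra register clobber, recog reports this
   through its third argument and we re-wrap the pattern in a PARALLEL
   with the missing CLOBBERs, provided the clobbered registers are dead.  */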
10021
10022static int
10023recog_for_combine (pnewpat, insn, pnotes)
10024     rtx *pnewpat;
10025     rtx insn;
10026     rtx *pnotes;
10027{
10028  rtx pat = *pnewpat;
10029  int insn_code_number;
10030  int num_clobbers_to_add = 0;
10031  int i;
10032  rtx notes = 0;
10033  rtx dummy_insn;
10034
10035  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10036     we use to indicate that something didn't match.  If we find such a
10037     thing, force rejection.  */
10038  if (GET_CODE (pat) == PARALLEL)
10039    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10040      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10041	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10042	return -1;
10043
10044  /* *pnewpat does not have to be the actual PATTERN (insn), so make a dummy
10045     instruction for pattern recognition.  */
10046  dummy_insn = shallow_copy_rtx (insn);
10047  PATTERN (dummy_insn) = pat;
10048  REG_NOTES (dummy_insn) = 0;
10049
10050  insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
10051
10052  /* If the pattern isn't recognized, we may previously have had an insn
10053     that clobbered some register as a side effect, but the combined
10054     insn doesn't need to do that.  So try once more without the clobbers
10055     unless this represents an ASM insn.  */
10056
10057  if (insn_code_number < 0 && ! check_asm_operands (pat)
10058      && GET_CODE (pat) == PARALLEL)
10059    {
10060      int pos;
10061
10062      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10063	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10064	  {
10065	    if (i != pos)
10066	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10067	    pos++;
10068	  }
10069
10070      SUBST_INT (XVECLEN (pat, 0), pos);
10071
10072      if (pos == 1)
10073	pat = XVECEXP (pat, 0, 0);
10074
10075      PATTERN (dummy_insn) = pat;
10076      insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
10077    }
10078
10079  /* Recognize all noop sets, these will be killed by followup pass.  */
10080  /* Recognize all no-op sets; these will be killed by a follow-up pass.  */
10081    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10082
10083  /* If we had any clobbers to add, make a new pattern than contains
10084  /* If we had any clobbers to add, make a new pattern that contains
10085  if (num_clobbers_to_add)
10086    {
10087      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10088				     rtvec_alloc (GET_CODE (pat) == PARALLEL
10089						  ? (XVECLEN (pat, 0)
10090						     + num_clobbers_to_add)
10091						  : num_clobbers_to_add + 1));
10092
10093      if (GET_CODE (pat) == PARALLEL)
10094	for (i = 0; i < XVECLEN (pat, 0); i++)
10095	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10096      else
10097	XVECEXP (newpat, 0, 0) = pat;
10098
10099      add_clobbers (newpat, insn_code_number);
10100
10101      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10102	   i < XVECLEN (newpat, 0); i++)
10103	{
10104	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
10105	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10106	    return -1;
10107	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
10108				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
10109	}
10110      pat = newpat;
10111    }
10112
10113  *pnewpat = pat;
10114  *pnotes = notes;
10115
10116  return insn_code_number;
10117}
10118
10119/* Like gen_lowpart but for use by combine.  In combine it is not possible
10120   to create any new pseudoregs.  However, it is safe to create
10121   invalid memory addresses, because combine will try to recognize
10122   them and all they will do is make the combine attempt fail.
10123
10124   If for some reason this cannot do its job, an rtx
10125   (clobber (const_int 0)) is returned.
10126   An insn containing that will not be recognized.  */
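/* E.g. (illustrative): asking for the QImode low part of (reg:SI 100)
   normally yields (subreg:QI (reg:SI 100) 0) (modulo endianness), while
   a request we cannot honor comes back as (clobber (const_int 0)).  */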
10127
10128#undef gen_lowpart
10129
10130static rtx
10131gen_lowpart_for_combine (mode, x)
10132     enum machine_mode mode;
10133     rtx x;
10134{
10135  rtx result;
10136
10137  if (GET_MODE (x) == mode)
10138    return x;
10139
10140  /* We can only support MODE being wider than a word if X is a
10141     constant integer or has a mode the same size.  */
10142
10143  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
10144      && ! ((GET_MODE (x) == VOIDmode
10145	     && (GET_CODE (x) == CONST_INT
10146		 || GET_CODE (x) == CONST_DOUBLE))
10147	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
10148    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10149
10150  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10151     won't know what to do.  So we will strip off the SUBREG here and
10152     process normally.  */
10153  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
10154    {
10155      x = SUBREG_REG (x);
10156      if (GET_MODE (x) == mode)
10157	return x;
10158    }
10159
10160  result = gen_lowpart_common (mode, x);
10161#ifdef CANNOT_CHANGE_MODE_CLASS
10162  if (result != 0
10163      && GET_CODE (result) == SUBREG
10164      && GET_CODE (SUBREG_REG (result)) == REG
10165      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER)
10166    bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (result))
10167				      * MAX_MACHINE_MODE
10168				      + GET_MODE (result));
10169#endif
10170
10171  if (result)
10172    return result;
10173
10174  if (GET_CODE (x) == MEM)
10175    {
10176      int offset = 0;
10177
10178      /* Refuse to work on a volatile memory ref or one with a mode-dependent
10179	 address.  */
10180      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10181	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10182
10183      /* If we want to refer to something bigger than the original memref,
10184	 generate a perverse subreg instead.  That will force a reload
10185	 of the original memref X.  */
10186      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
10187	return gen_rtx_SUBREG (mode, x, 0);
10188
10189      if (WORDS_BIG_ENDIAN)
10190	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
10191		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
10192
10193      if (BYTES_BIG_ENDIAN)
10194	{
10195	  /* Adjust the address so that the address-after-the-data is
10196	     unchanged.  */
10197	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
10198		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
10199	}
10200
10201      return adjust_address_nv (x, mode, offset);
10202    }
10203
10204  /* If X is a comparison operator, rewrite it in a new mode.  This
10205     probably won't match, but may allow further simplifications.  */
10206  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
10207    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
10208
10209  /* If we couldn't simplify X any other way, just enclose it in a
10210     SUBREG.  Normally, this SUBREG won't match, but some patterns may
10211     include an explicit SUBREG or we may simplify it further in combine.  */
10212  else
10213    {
10214      int offset = 0;
10215      rtx res;
10216      enum machine_mode sub_mode = GET_MODE (x);
10217
10218      offset = subreg_lowpart_offset (mode, sub_mode);
10219      if (sub_mode == VOIDmode)
10220	{
10221	  sub_mode = int_mode_for_mode (mode);
10222	  x = gen_lowpart_common (sub_mode, x);
10223	}
10224      res = simplify_gen_subreg (mode, x, sub_mode, offset);
10225      if (res)
10226	return res;
10227      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10228    }
10229}
10230
10231/* This routine makes binary operations by first seeing if they
10232   fold; if not, a new expression is allocated.  */
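/* E.g. (illustrative): gen_binary (PLUS, SImode, (const_int 2),
   (const_int 3)) folds to (const_int 5), while with a REG operand it
   allocates (plus:SI (reg:SI 100) (const_int 3)).  */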
10233
10234static rtx
10235gen_binary (code, mode, op0, op1)
10236     enum rtx_code code;
10237     enum machine_mode mode;
10238     rtx op0, op1;
10239{
10240  rtx result;
10241  rtx tem;
10242
10243  if (GET_CODE (op0) == CLOBBER)
10244    return op0;
10245  else if (GET_CODE (op1) == CLOBBER)
10246    return op1;
10247
10248  if (GET_RTX_CLASS (code) == 'c'
10249      && swap_commutative_operands_p (op0, op1))
10250    tem = op0, op0 = op1, op1 = tem;
10251
10252  if (GET_RTX_CLASS (code) == '<')
10253    {
10254      enum machine_mode op_mode = GET_MODE (op0);
10255
10256      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
10257	 just (REL_OP X Y).  */
10258      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
10259	{
10260	  op1 = XEXP (op0, 1);
10261	  op0 = XEXP (op0, 0);
10262	  op_mode = GET_MODE (op0);
10263	}
10264
10265      if (op_mode == VOIDmode)
10266	op_mode = GET_MODE (op1);
10267      result = simplify_relational_operation (code, op_mode, op0, op1);
10268    }
10269  else
10270    result = simplify_binary_operation (code, mode, op0, op1);
10271
10272  if (result)
10273    return result;
10274
10275  /* Put complex operands first and constants second.  */
10276  if (GET_RTX_CLASS (code) == 'c'
10277      && swap_commutative_operands_p (op0, op1))
10278    return gen_rtx_fmt_ee (code, mode, op1, op0);
10279
10280  /* If we are turning off bits already known off in OP0, we need not do
10281     an AND.  */
10282  else if (code == AND && GET_CODE (op1) == CONST_INT
10283	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10284	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
10285    return op0;
10286
10287  return gen_rtx_fmt_ee (code, mode, op0, op1);
10288}
10289
10290/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10291   comparison code that will be tested.
10292
10293   The result is a possibly different comparison code to use.  *POP0 and
10294   *POP1 may be updated.
10295
10296   It is possible that we might detect that a comparison is either always
10297   true or always false.  However, we do not perform general constant
10298   folding in combine, so this knowledge isn't useful.  Such tautologies
10299   should have been detected earlier.  Hence we ignore all such cases.  */
10300
10301static enum rtx_code
10302simplify_comparison (code, pop0, pop1)
10303     enum rtx_code code;
10304     rtx *pop0;
10305     rtx *pop1;
10306{
10307  rtx op0 = *pop0;
10308  rtx op1 = *pop1;
10309  rtx tem, tem1;
10310  int i;
10311  enum machine_mode mode, tmode;
10312
10313  /* Try a few ways of applying the same transformation to both operands.  */
10314  while (1)
10315    {
10316#ifndef WORD_REGISTER_OPERATIONS
10317      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10318	 so check specially.  */
10319      if (code != GTU && code != GEU && code != LTU && code != LEU
10320	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10321	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
10322	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
10323	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10324	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10325	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10326	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10327	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10328	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
10329	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10330	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
10331	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
10332	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
10333	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
10334	  && (INTVAL (XEXP (op0, 1))
10335	      == (GET_MODE_BITSIZE (GET_MODE (op0))
10336		  - (GET_MODE_BITSIZE
10337		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10338	{
10339	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10340	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10341	}
10342#endif
10343
10344      /* If both operands are the same constant shift, see if we can ignore the
10345	 shift.  We can if the shift is a rotate or if the bits shifted out of
10346	 this shift are known to be zero for both inputs and if the type of
10347	 comparison is compatible with the shift.  */
10348      if (GET_CODE (op0) == GET_CODE (op1)
10349	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10350	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10351	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10352		  && (code != GT && code != LT && code != GE && code != LE))
10353	      || (GET_CODE (op0) == ASHIFTRT
10354		  && (code != GTU && code != LTU
10355		      && code != GEU && code != LEU)))
10356	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10357	  && INTVAL (XEXP (op0, 1)) >= 0
10358	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10359	  && XEXP (op0, 1) == XEXP (op1, 1))
10360	{
10361	  enum machine_mode mode = GET_MODE (op0);
10362	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10363	  int shift_count = INTVAL (XEXP (op0, 1));
10364
10365	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10366	    mask &= (mask >> shift_count) << shift_count;
10367	  else if (GET_CODE (op0) == ASHIFT)
10368	    mask = (mask & (mask << shift_count)) >> shift_count;
10369
10370	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10371	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10372	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10373	  else
10374	    break;
10375	}
10376
10377      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10378	 SUBREGs are of the same mode, and, in both cases, the AND would
10379	 be redundant if the comparison was done in the narrower mode,
10380	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10381	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10382	 if we only care about QImode, we don't need the AND).  This case
10383	 occurs if the output mode of an scc insn is not SImode and
10384	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10385
10386	 Similarly, check for a case where the AND's are ZERO_EXTEND
10387	 operations from some narrower mode even though a SUBREG is not
10388	 present.  */
10389
10390      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10391	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10392	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10393	{
10394	  rtx inner_op0 = XEXP (op0, 0);
10395	  rtx inner_op1 = XEXP (op1, 0);
10396	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10397	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10398	  int changed = 0;
10399
10400	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10401	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10402		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10403	      && (GET_MODE (SUBREG_REG (inner_op0))
10404		  == GET_MODE (SUBREG_REG (inner_op1)))
10405	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10406		  <= HOST_BITS_PER_WIDE_INT)
10407	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10408					     GET_MODE (SUBREG_REG (inner_op0)))))
10409	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10410					     GET_MODE (SUBREG_REG (inner_op1))))))
10411	    {
10412	      op0 = SUBREG_REG (inner_op0);
10413	      op1 = SUBREG_REG (inner_op1);
10414
10415	      /* The resulting comparison is always unsigned since we masked
10416		 off the original sign bit.  */
10417	      code = unsigned_condition (code);
10418
10419	      changed = 1;
10420	    }
10421
10422	  else if (c0 == c1)
10423	    for (tmode = GET_CLASS_NARROWEST_MODE
10424		 (GET_MODE_CLASS (GET_MODE (op0)));
10425		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10426	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10427		{
10428		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10429		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10430		  code = unsigned_condition (code);
10431		  changed = 1;
10432		  break;
10433		}
10434
10435	  if (! changed)
10436	    break;
10437	}
10438
10439      /* If both operands are NOT, we can strip off the outer operation
10440	 and adjust the comparison code for swapped operands; similarly for
10441	 NEG, except that this must be an equality comparison.  */
10442      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10443	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10444		   && (code == EQ || code == NE)))
10445	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10446
10447      else
10448	break;
10449    }
10450
10451  /* If the first operand is a constant, swap the operands and adjust the
10452     comparison code appropriately, but don't do this if the second operand
10453     is already a constant integer.  */
10454  if (swap_commutative_operands_p (op0, op1))
10455    {
10456      tem = op0, op0 = op1, op1 = tem;
10457      code = swap_condition (code);
10458    }
10459
10460  /* We now enter a loop during which we will try to simplify the comparison.
10461     For the most part, we are only concerned with comparisons with zero,
10462     but some expressions really are comparisons with zero even though
10463     they do not start out looking that way.  */
10464
10465  while (GET_CODE (op1) == CONST_INT)
10466    {
10467      enum machine_mode mode = GET_MODE (op0);
10468      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10469      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10470      int equality_comparison_p;
10471      int sign_bit_comparison_p;
10472      int unsigned_comparison_p;
10473      HOST_WIDE_INT const_op;
10474
10475      /* We only want to handle integral modes.  This catches VOIDmode,
10476	 CCmode, and the floating-point modes.  An exception is that we
10477	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10478	 operation.  */
10479
10480      if (GET_MODE_CLASS (mode) != MODE_INT
10481	  && ! (mode == VOIDmode
10482		&& (GET_CODE (op0) == COMPARE
10483		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10484	break;
10485
10486      /* Get the constant we are comparing against and turn off all bits
10487	 that are not significant in our mode.  */
10488      const_op = INTVAL (op1);
10489      if (mode != VOIDmode)
10490	const_op = trunc_int_for_mode (const_op, mode);
10491      op1 = GEN_INT (const_op);
10492
10493      /* If we are comparing against a constant power of two and the value
10494	 being compared can only have that single bit nonzero (e.g., it was
10495	 `and'ed with that bit), we can replace this with a comparison
10496	 with zero.  */
10497      if (const_op
10498	  && (code == EQ || code == NE || code == GE || code == GEU
10499	      || code == LT || code == LTU)
10500	  && mode_width <= HOST_BITS_PER_WIDE_INT
10501	  && exact_log2 (const_op) >= 0
10502	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10503	{
10504	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10505	  op1 = const0_rtx, const_op = 0;
10506	}
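      /* For example, if nonzero_bits shows that OP0 can have at most
	 bit 3 set, (eq OP0 (const_int 8)) becomes
	 (ne OP0 (const_int 0)), since OP0 must be either 0 or 8.  */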
10507
10508      /* Similarly, if we are comparing a value known to be either -1 or
10509	 0 with -1, change it to the opposite comparison against zero.  */
10510
10511      if (const_op == -1
10512	  && (code == EQ || code == NE || code == GT || code == LE
10513	      || code == GEU || code == LTU)
10514	  && num_sign_bit_copies (op0, mode) == mode_width)
10515	{
10516	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10517	  op1 = const0_rtx, const_op = 0;
10518	}
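      /* For example, if num_sign_bit_copies shows that OP0 is either
	 0 or -1, (eq OP0 (const_int -1)) becomes
	 (ne OP0 (const_int 0)).  */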
10519
10520      /* Do some canonicalizations based on the comparison code.  We prefer
10521	 comparisons against zero and then prefer equality comparisons.
10522	 If we can reduce the size of a constant, we will do that too.  */
10523
10524      switch (code)
10525	{
10526	case LT:
10527	  /* < C is equivalent to <= (C - 1) */
10528	  if (const_op > 0)
10529	    {
10530	      const_op -= 1;
10531	      op1 = GEN_INT (const_op);
10532	      code = LE;
10533	      /* ... fall through to LE case below.  */
10534	    }
10535	  else
10536	    break;
10537
10538	case LE:
10539	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
10540	  if (const_op < 0)
10541	    {
10542	      const_op += 1;
10543	      op1 = GEN_INT (const_op);
10544	      code = LT;
10545	    }
10546
10547	  /* If we are doing a <= 0 comparison on a value known to have
10548	     a zero sign bit, we can replace this with == 0.  */
10549	  else if (const_op == 0
10550		   && mode_width <= HOST_BITS_PER_WIDE_INT
10551		   && (nonzero_bits (op0, mode)
10552		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10553	    code = EQ;
10554	  break;
10555
10556	case GE:
10557	  /* >= C is equivalent to > (C - 1).  */
10558	  if (const_op > 0)
10559	    {
10560	      const_op -= 1;
10561	      op1 = GEN_INT (const_op);
10562	      code = GT;
10563	      /* ... fall through to GT below.  */
10564	    }
10565	  else
10566	    break;
10567
10568	case GT:
10569	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10570	  if (const_op < 0)
10571	    {
10572	      const_op += 1;
10573	      op1 = GEN_INT (const_op);
10574	      code = GE;
10575	    }
10576
10577	  /* If we are doing a > 0 comparison on a value known to have
10578	     a zero sign bit, we can replace this with != 0.  */
10579	  else if (const_op == 0
10580		   && mode_width <= HOST_BITS_PER_WIDE_INT
10581		   && (nonzero_bits (op0, mode)
10582		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10583	    code = NE;
10584	  break;
10585
10586	case LTU:
10587	  /* < C is equivalent to <= (C - 1).  */
10588	  if (const_op > 0)
10589	    {
10590	      const_op -= 1;
10591	      op1 = GEN_INT (const_op);
10592	      code = LEU;
10593	      /* ... fall through ...  */
10594	    }
10595
10596	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10597	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10598		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10599	    {
10600	      const_op = 0, op1 = const0_rtx;
10601	      code = GE;
10602	      break;
10603	    }
10604	  else
10605	    break;
10606
10607	case LEU:
10608	  /* unsigned <= 0 is equivalent to == 0 */
10609	  if (const_op == 0)
10610	    code = EQ;
10611
10612	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10613	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10614		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10615	    {
10616	      const_op = 0, op1 = const0_rtx;
10617	      code = GE;
10618	    }
10619	  break;
10620
10621	case GEU:
10622	  /* >= C is equivalent to > (C - 1).  */
10623	  if (const_op > 1)
10624	    {
10625	      const_op -= 1;
10626	      op1 = GEN_INT (const_op);
10627	      code = GTU;
10628	      /* ... fall through ...  */
10629	    }
10630
10631	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10632	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10633		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10634	    {
10635	      const_op = 0, op1 = const0_rtx;
10636	      code = LT;
10637	      break;
10638	    }
10639	  else
10640	    break;
10641
10642	case GTU:
10643	  /* unsigned > 0 is equivalent to != 0 */
10644	  if (const_op == 0)
10645	    code = NE;
10646
10647	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10648	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10649		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10650	    {
10651	      const_op = 0, op1 = const0_rtx;
10652	      code = LT;
10653	    }
10654	  break;
10655
10656	default:
10657	  break;
10658	}
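      /* Examples of the canonicalizations above: in a 32-bit mode,
	 (lt X (const_int 5)) becomes (le X (const_int 4)),
	 (ltu X (const_int 0x80000000)) becomes (ge X (const_int 0)),
	 and (gtu X (const_int 0)) becomes (ne X (const_int 0)).  */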
10659
10660      /* Compute some predicates to simplify code below.  */
10661
10662      equality_comparison_p = (code == EQ || code == NE);
10663      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10664      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10665			       || code == GEU);
10666
10667      /* If this is a sign bit comparison and we can do arithmetic in
10668	 MODE, say that we will only be needing the sign bit of OP0.  */
10669      if (sign_bit_comparison_p
10670	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10671	op0 = force_to_mode (op0, mode,
10672			     ((HOST_WIDE_INT) 1
10673			      << (GET_MODE_BITSIZE (mode) - 1)),
10674			     NULL_RTX, 0);
10675
10676      /* Now try cases based on the opcode of OP0.  If none of the cases
10677	 does a "continue", we exit this loop immediately after the
10678	 switch.  */
10679
10680      switch (GET_CODE (op0))
10681	{
10682	case ZERO_EXTRACT:
10683	  /* If we are extracting a single bit from a variable position in
10684	     a constant that has only a single bit set and are comparing it
10685	     with zero, we can convert this into an equality comparison
10686	     between the position and the location of the single bit.  */
10687
10688	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10689	      && XEXP (op0, 1) == const1_rtx
10690	      && equality_comparison_p && const_op == 0
10691	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10692	    {
10693	      if (BITS_BIG_ENDIAN)
10694		{
10695		  enum machine_mode new_mode
10696		    = mode_for_extraction (EP_extzv, 1);
10697		  if (new_mode == MAX_MACHINE_MODE)
10698		    i = BITS_PER_WORD - 1 - i;
10699		  else
10700		    {
10701		      mode = new_mode;
10702		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10703		    }
10704		}
10705
10706	      op0 = XEXP (op0, 2);
10707	      op1 = GEN_INT (i);
10708	      const_op = i;
10709
10710	      /* Result is nonzero iff shift count is equal to I.  */
10711	      code = reverse_condition (code);
10712	      continue;
10713	    }
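	  /* For example (with BITS_BIG_ENDIAN clear):
	     (eq (zero_extract (const_int 4) (const_int 1) POS)
		 (const_int 0))
	     extracts bit POS of the constant 4, which is set only for
	     POS == 2, so the test becomes (ne POS (const_int 2)).  */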
10714
10715	  /* ... fall through ...  */
10716
10717	case SIGN_EXTRACT:
10718	  tem = expand_compound_operation (op0);
10719	  if (tem != op0)
10720	    {
10721	      op0 = tem;
10722	      continue;
10723	    }
10724	  break;
10725
10726	case NOT:
10727	  /* If testing for equality, we can take the NOT of the constant.  */
10728	  if (equality_comparison_p
10729	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10730	    {
10731	      op0 = XEXP (op0, 0);
10732	      op1 = tem;
10733	      continue;
10734	    }
10735
10736	  /* If just looking at the sign bit, reverse the sense of the
10737	     comparison.  */
10738	  if (sign_bit_comparison_p)
10739	    {
10740	      op0 = XEXP (op0, 0);
10741	      code = (code == GE ? LT : GE);
10742	      continue;
10743	    }
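	  /* For example, (eq (not X) (const_int 7)) becomes
	     (eq X (const_int -8)), and the sign-bit test
	     (lt (not X) (const_int 0)) becomes (ge X (const_int 0)),
	     since NOT complements the sign bit.  */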
10744	  break;
10745
10746	case NEG:
10747	  /* If testing for equality, we can take the NEG of the constant.  */
10748	  if (equality_comparison_p
10749	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10750	    {
10751	      op0 = XEXP (op0, 0);
10752	      op1 = tem;
10753	      continue;
10754	    }
10755
10756	  /* The remaining cases only apply to comparisons with zero.  */
10757	  if (const_op != 0)
10758	    break;
10759
10760	  /* When X is ABS or is known positive,
10761	     (neg X) is < 0 if and only if X != 0.  */
10762
10763	  if (sign_bit_comparison_p
10764	      && (GET_CODE (XEXP (op0, 0)) == ABS
10765		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10766		      && (nonzero_bits (XEXP (op0, 0), mode)
10767			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10768	    {
10769	      op0 = XEXP (op0, 0);
10770	      code = (code == LT ? NE : EQ);
10771	      continue;
10772	    }
10773
10774	  /* If we have NEG of something whose two high-order bits are the
10775	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10776	  if (num_sign_bit_copies (op0, mode) >= 2)
10777	    {
10778	      op0 = XEXP (op0, 0);
10779	      code = swap_condition (code);
10780	      continue;
10781	    }
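	  /* For example, (lt (neg (abs X)) (const_int 0)) becomes
	     (ne X (const_int 0)), since -|X| is negative exactly when
	     X is nonzero.  */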
10782	  break;
10783
10784	case ROTATE:
10785	  /* If we are testing equality and our count is a constant, we
10786	     can perform the inverse operation on our RHS.  */
10787	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10788	      && (tem = simplify_binary_operation (ROTATERT, mode,
10789						   op1, XEXP (op0, 1))) != 0)
10790	    {
10791	      op0 = XEXP (op0, 0);
10792	      op1 = tem;
10793	      continue;
10794	    }
10795
10796	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10797	     a particular bit.  Convert it to an AND of a constant of that
10798	     bit.  This will be converted into a ZERO_EXTRACT.  */
10799	  if (const_op == 0 && sign_bit_comparison_p
10800	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10801	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10802	    {
10803	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10804					    ((HOST_WIDE_INT) 1
10805					     << (mode_width - 1
10806						 - INTVAL (XEXP (op0, 1)))));
10807	      code = (code == LT ? NE : EQ);
10808	      continue;
10809	    }
10810
10811	  /* Fall through.  */
10812
10813	case ABS:
10814	  /* ABS is ignorable inside an equality comparison with zero.  */
10815	  if (const_op == 0 && equality_comparison_p)
10816	    {
10817	      op0 = XEXP (op0, 0);
10818	      continue;
10819	    }
10820	  break;
10821
10822	case SIGN_EXTEND:
10823	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10824	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10825	     are either testing inequality or have an unsigned comparison
10826	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
10827	  if (! unsigned_comparison_p
10828	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10829		  <= HOST_BITS_PER_WIDE_INT)
10830	      && ((unsigned HOST_WIDE_INT) const_op
10831		  < (((unsigned HOST_WIDE_INT) 1
10832		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10833	    {
10834	      op0 = XEXP (op0, 0);
10835	      continue;
10836	    }
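	  /* For example, (lt (sign_extend:SI X:QI) (const_int 5))
	     becomes (lt X:QI (const_int 5)), since 5 is representable
	     in QImode and the comparison is signed.  */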
10837	  break;
10838
10839	case SUBREG:
10840	  /* Check for the case where we are comparing A - C1 with C2,
10841	     both constants are smaller than 1/2 the maximum positive
10842	     value in MODE, and the comparison is equality or unsigned.
10843	     In that case, if A is either zero-extended to MODE or has
10844	     sufficient sign bits so that the high-order bit in MODE
10845	     is a copy of the sign in the inner mode, we can prove that it is
10846	     safe to do the operation in the wider mode.  This simplifies
10847	     many range checks.  */
10848
10849	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10850	      && subreg_lowpart_p (op0)
10851	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10852	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10853	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10854	      && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10855		  < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10856	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10857	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10858				      GET_MODE (SUBREG_REG (op0)))
10859			& ~GET_MODE_MASK (mode))
10860		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10861					   GET_MODE (SUBREG_REG (op0)))
10862		      > (unsigned int)
10863			(GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10864			 - GET_MODE_BITSIZE (mode)))))
10865	    {
10866	      op0 = SUBREG_REG (op0);
10867	      continue;
10868	    }
10869
10870	  /* If the inner mode is narrower and we are extracting the low part,
10871	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10872	  if (subreg_lowpart_p (op0)
10873	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10874	    /* Fall through */ ;
10875	  else
10876	    break;
10877
10878	  /* ... fall through ...  */
10879
10880	case ZERO_EXTEND:
10881	  if ((unsigned_comparison_p || equality_comparison_p)
10882	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10883		  <= HOST_BITS_PER_WIDE_INT)
10884	      && ((unsigned HOST_WIDE_INT) const_op
10885		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10886	    {
10887	      op0 = XEXP (op0, 0);
10888	      continue;
10889	    }
10890	  break;
10891
10892	case PLUS:
10893	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10894	     this for equality comparisons due to pathological cases involving
10895	     overflows.  */
10896	  if (equality_comparison_p
10897	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10898							op1, XEXP (op0, 1))))
10899	    {
10900	      op0 = XEXP (op0, 0);
10901	      op1 = tem;
10902	      continue;
10903	    }
10904
10905	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10906	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10907	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10908	    {
10909	      op0 = XEXP (XEXP (op0, 0), 0);
10910	      code = (code == LT ? EQ : NE);
10911	      continue;
10912	    }
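	  /* For example, (eq (plus X (const_int 3)) (const_int 10))
	     becomes (eq X (const_int 7)).  */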
10913	  break;
10914
10915	case MINUS:
10916	  /* We used to optimize signed comparisons against zero, but that
10917	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10918	     arrive here already converted to equality comparisons, and
10919	     (GEU, LTU) are optimized away.  No need to special-case them.  */
10920
10921	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10922	     (eq B (minus A C)), whichever simplifies.  We can only do
10923	     this for equality comparisons due to pathological cases involving
10924	     overflows.  */
10925	  if (equality_comparison_p
10926	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10927							XEXP (op0, 1), op1)))
10928	    {
10929	      op0 = XEXP (op0, 0);
10930	      op1 = tem;
10931	      continue;
10932	    }
10933
10934	  if (equality_comparison_p
10935	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10936							XEXP (op0, 0), op1)))
10937	    {
10938	      op0 = XEXP (op0, 1);
10939	      op1 = tem;
10940	      continue;
10941	    }
10942
10943	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10944	     of bits in X minus 1, is one iff X > 0.  */
10945	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10946	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10947	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10948		 == mode_width - 1
10949	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10950	    {
10951	      op0 = XEXP (op0, 1);
10952	      code = (code == GE ? LE : GT);
10953	      continue;
10954	    }
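	  /* For example, (eq (minus A B) (const_int 0)) becomes
	     (eq A B), since (plus B (const_int 0)) folds to B.  */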
10955	  break;
10956
10957	case XOR:
10958	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10959	     if C is zero or B is a constant.  */
10960	  if (equality_comparison_p
10961	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10962							XEXP (op0, 1), op1)))
10963	    {
10964	      op0 = XEXP (op0, 0);
10965	      op1 = tem;
10966	      continue;
10967	    }
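	  /* For example, (eq (xor X (const_int 12)) (const_int 5))
	     becomes (eq X (const_int 9)), since 12 ^ 5 == 9.  */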
10968	  break;
10969
10970	case EQ:  case NE:
10971	case UNEQ:  case LTGT:
10972	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10973	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10974        case UNORDERED: case ORDERED:
10975	  /* We can't do anything if OP0 is a condition code value, rather
10976	     than an actual data value.  */
10977	  if (const_op != 0
10978#ifdef HAVE_cc0
10979	      || XEXP (op0, 0) == cc0_rtx
10980#endif
10981	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10982	    break;
10983
10984	  /* Get the two operands being compared.  */
10985	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10986	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10987	  else
10988	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10989
10990	  /* Check for the cases where we simply want the result of the
10991	     earlier test or the opposite of that result.  */
10992	  if (code == NE || code == EQ
10993	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10994		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10995		  && (STORE_FLAG_VALUE
10996		      & (((HOST_WIDE_INT) 1
10997			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10998		  && (code == LT || code == GE)))
10999	    {
11000	      enum rtx_code new_code;
11001	      if (code == LT || code == NE)
11002		new_code = GET_CODE (op0);
11003	      else
11004		new_code = combine_reversed_comparison_code (op0);
11005
11006	      if (new_code != UNKNOWN)
11007		{
11008		  code = new_code;
11009		  op0 = tem;
11010		  op1 = tem1;
11011		  continue;
11012		}
11013	    }
11014	  break;
11015
11016	case IOR:
11017	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
11018	     iff X <= 0.  */
11019	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11020	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11021	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11022	    {
11023	      op0 = XEXP (op0, 1);
11024	      code = (code == GE ? GT : LE);
11025	      continue;
11026	    }
11027	  break;
11028
11029	case AND:
11030	  /* Convert (and (ashift 1 X) Y) to (and (lshiftrt Y X) 1).  This
11031	     will be converted to a ZERO_EXTRACT later.  */
11032	  if (const_op == 0 && equality_comparison_p
11033	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11034	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11035	    {
11036	      op0 = simplify_and_const_int
11037		(op0, mode, gen_rtx_LSHIFTRT (mode,
11038					      XEXP (op0, 1),
11039					      XEXP (XEXP (op0, 0), 1)),
11040		 (HOST_WIDE_INT) 1);
11041	      continue;
11042	    }
11043
11044	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11045	     zero and X is a comparison and C1 and C2 describe only bits set
11046	     in STORE_FLAG_VALUE, we can compare with X.  */
11047	  if (const_op == 0 && equality_comparison_p
11048	      && mode_width <= HOST_BITS_PER_WIDE_INT
11049	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11050	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11051	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11052	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
11053	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
11054	    {
11055	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11056		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
11057	      if ((~STORE_FLAG_VALUE & mask) == 0
11058		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
11059		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
11060			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
11061		{
11062		  op0 = XEXP (XEXP (op0, 0), 0);
11063		  continue;
11064		}
11065	    }
11066
11067	  /* If we are doing an equality comparison of an AND of a bit equal
11068	     to the sign bit, replace this with a LT or GE comparison of
11069	     the underlying value.  */
11070	  if (equality_comparison_p
11071	      && const_op == 0
11072	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11073	      && mode_width <= HOST_BITS_PER_WIDE_INT
11074	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11075		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
11076	    {
11077	      op0 = XEXP (op0, 0);
11078	      code = (code == EQ ? GE : LT);
11079	      continue;
11080	    }
11081
11082	  /* If this AND operation is really a ZERO_EXTEND from a narrower
11083	     mode, the constant fits within that mode, and this is either an
11084	     equality or unsigned comparison, try to do this comparison in
11085	     the narrower mode.  */
11086	  if ((equality_comparison_p || unsigned_comparison_p)
11087	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11088	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11089				   & GET_MODE_MASK (mode))
11090				  + 1)) >= 0
11091	      && const_op >> i == 0
11092	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
11093	    {
11094	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
11095	      continue;
11096	    }
11097
11098	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
11099	     in both M1 and M2 and the SUBREG is either paradoxical or
11100	     represents the low part, permute the SUBREG and the AND and
11101	     try again.  */
11102	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
11103	      /* Require an integral mode, to avoid creating something like
11104		 (AND:SF ...).  */
11105	      && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11106	      && (0
11107#ifdef WORD_REGISTER_OPERATIONS
11108		  || ((mode_width
11109		       > (GET_MODE_BITSIZE
11110			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11111		      && mode_width <= BITS_PER_WORD)
11112#endif
11113		  || ((mode_width
11114		       <= (GET_MODE_BITSIZE
11115			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11116		      && subreg_lowpart_p (XEXP (op0, 0))))
11117#ifndef WORD_REGISTER_OPERATIONS
11118	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
11119		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
11120		 As originally written the upper bits have a defined value
11121		 due to the AND operation.  However, if we commute the AND
11122		 inside the SUBREG then they no longer have defined values
11123		 and the meaning of the code has been changed.  */
11124	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
11125		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
11126#endif
11127	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11128	      && mode_width <= HOST_BITS_PER_WIDE_INT
11129	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11130		  <= HOST_BITS_PER_WIDE_INT)
11131	      && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
11132	      && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
11133		       & INTVAL (XEXP (op0, 1)))
11134	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
11135	      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11136		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
11137
11138	    {
11139	      op0
11140		= gen_lowpart_for_combine
11141		  (mode,
11142		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
11143			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
11144	      continue;
11145	    }
11146
11147	  /* Convert (ne (and (lshiftrt (not X) C) 1) 0) to
11148	     (eq (and (lshiftrt X C) 1) 0).  */
11149	  if (const_op == 0 && equality_comparison_p
11150	      && XEXP (op0, 1) == const1_rtx
11151	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11152	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
11153	    {
11154	      op0 = simplify_and_const_int
11155		(op0, mode,
11156		 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
11157				   XEXP (XEXP (op0, 0), 1)),
11158		 (HOST_WIDE_INT) 1);
11159	      code = (code == NE ? EQ : NE);
11160	      continue;
11161	    }
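	  /* For example, the variable-bit test
	     (ne (and (ashift (const_int 1) X) Y) (const_int 0)) is
	     rewritten by the first transformation above as
	     (ne (and (lshiftrt Y X) (const_int 1)) (const_int 0)),
	     which later becomes a ZERO_EXTRACT of bit X of Y.  */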
11162	  break;
11163
11164	case ASHIFT:
11165	  /* If we have (compare (ashift FOO N) (const_int C)) and
11166	     the high order N bits of FOO (N+1 if an inequality comparison)
11167	     are known to be zero, we can do this by comparing FOO with C
11168	     shifted right N bits so long as the low-order N bits of C are
11169	     zero.  */
11170	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11171	      && INTVAL (XEXP (op0, 1)) >= 0
11172	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11173		  < HOST_BITS_PER_WIDE_INT)
11174	      && ((const_op
11175		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11176	      && mode_width <= HOST_BITS_PER_WIDE_INT
11177	      && (nonzero_bits (XEXP (op0, 0), mode)
11178		  & ~(mask >> (INTVAL (XEXP (op0, 1))
11179			       + ! equality_comparison_p))) == 0)
11180	    {
11181	      /* We must perform a logical shift, not an arithmetic one,
11182		 as we want the top N bits of C to be zero.  */
11183	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11184
11185	      temp >>= INTVAL (XEXP (op0, 1));
11186	      op1 = gen_int_mode (temp, mode);
11187	      op0 = XEXP (op0, 0);
11188	      continue;
11189	    }
11190
11191	  /* If we are doing a sign bit comparison, it means we are testing
11192	     a particular bit.  Convert it to the appropriate AND.  */
11193	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
11194	      && mode_width <= HOST_BITS_PER_WIDE_INT)
11195	    {
11196	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11197					    ((HOST_WIDE_INT) 1
11198					     << (mode_width - 1
11199						 - INTVAL (XEXP (op0, 1)))));
11200	      code = (code == LT ? NE : EQ);
11201	      continue;
11202	    }
11203
11204	  /* If this is an equality comparison with zero and we are shifting
11205	     the low bit to the sign bit, we can convert this to an AND of the
11206	     low-order bit.  */
11207	  if (const_op == 0 && equality_comparison_p
11208	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11209	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11210		 == mode_width - 1)
11211	    {
11212	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11213					    (HOST_WIDE_INT) 1);
11214	      continue;
11215	    }
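	  /* For example, (eq (ashift X (const_int 2)) (const_int 20))
	     becomes (eq X (const_int 5)) when the two high-order bits
	     of X are known to be zero, since 20 has its low two bits
	     clear and 20 >> 2 == 5.  */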
11216	  break;
11217
11218	case ASHIFTRT:
11219	  /* If this is an equality comparison with zero, we can do this
11220	     as a logical shift, which might be much simpler.  */
11221	  if (equality_comparison_p && const_op == 0
11222	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
11223	    {
11224	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11225					  XEXP (op0, 0),
11226					  INTVAL (XEXP (op0, 1)));
11227	      continue;
11228	    }
11229
11230	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11231	     do the comparison in a narrower mode.  */
11232	  if (! unsigned_comparison_p
11233	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11234	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
11235	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11236	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11237					 MODE_INT, 1)) != BLKmode
11238	      && (((unsigned HOST_WIDE_INT) const_op
11239		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11240		  <= GET_MODE_MASK (tmode)))
11241	    {
11242	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
11243	      continue;
11244	    }
11245
11246	  /* Likewise if OP0 is a PLUS of a sign extension with a
11247	     constant, which is usually represented with the PLUS
11248	     between the shifts.  */
11249	  if (! unsigned_comparison_p
11250	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11251	      && GET_CODE (XEXP (op0, 0)) == PLUS
11252	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
11253	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11254	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11255	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11256					 MODE_INT, 1)) != BLKmode
11257	      && (((unsigned HOST_WIDE_INT) const_op
11258		   + (GET_MODE_MASK (tmode) >> 1) + 1)
11259		  <= GET_MODE_MASK (tmode)))
11260	    {
11261	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11262	      rtx add_const = XEXP (XEXP (op0, 0), 1);
11263	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
11264					  XEXP (op0, 1));
11265
11266	      op0 = gen_binary (PLUS, tmode,
11267				gen_lowpart_for_combine (tmode, inner),
11268				new_const);
11269	      continue;
11270	    }
11271
11272	  /* ... fall through ...  */
11273	case LSHIFTRT:
11274	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11275	     the low order N bits of FOO are known to be zero, we can do this
11276	     by comparing FOO with C shifted left N bits so long as no
11277	     overflow occurs.  */
11278	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
11279	      && INTVAL (XEXP (op0, 1)) >= 0
11280	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11281	      && mode_width <= HOST_BITS_PER_WIDE_INT
11282	      && (nonzero_bits (XEXP (op0, 0), mode)
11283		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11284	      && (((unsigned HOST_WIDE_INT) const_op
11285		   + (GET_CODE (op0) != LSHIFTRT
11286		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11287			 + 1)
11288		      : 0))
11289		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11290	    {
11291	      /* If the shift was logical, then we must make the condition
11292		 unsigned.  */
11293	      if (GET_CODE (op0) == LSHIFTRT)
11294		code = unsigned_condition (code);
11295
11296	      const_op <<= INTVAL (XEXP (op0, 1));
11297	      op1 = GEN_INT (const_op);
11298	      op0 = XEXP (op0, 0);
11299	      continue;
11300	    }
11301
11302	  /* If we are using this shift to extract just the sign bit, we
11303	     can replace this with an LT or GE comparison.  */
11304	  if (const_op == 0
11305	      && (equality_comparison_p || sign_bit_comparison_p)
11306	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
11307	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11308		 == mode_width - 1)
11309	    {
11310	      op0 = XEXP (op0, 0);
11311	      code = (code == NE || code == GT ? LT : GE);
11312	      continue;
11313	    }
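	  /* For example, in a 32-bit mode,
	     (ne (lshiftrt X (const_int 31)) (const_int 0)) becomes
	     (lt X (const_int 0)), since the shift leaves only the
	     sign bit.  */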
11314	  break;
11315
11316	default:
11317	  break;
11318	}
11319
11320      break;
11321    }
11322
11323  /* Now make any compound operations involved in this comparison.  Then,
11324     check for an outermost SUBREG on OP0 that is not doing anything or is
11325     paradoxical.  The latter transformation must only be performed when
11326     it is known that the "extra" bits will be the same in op0 and op1 or
11327     that they don't matter.  There are three cases to consider:
11328
11329     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11330     care bits and we can assume they have any convenient value.  So
11331     making the transformation is safe.
11332
11333     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11334     In this case the upper bits of op0 are undefined.  We should not make
11335     the simplification in that case as we do not know the contents of
11336     those bits.
11337
11338     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11339     NIL.  In that case we know those bits are zeros or ones.  We must
11340     also be sure that they are the same as the upper bits of op1.
11341
11342     We can never remove a SUBREG for a non-equality comparison because
11343     the sign bit is in a different place in the underlying object.  */
11344
11345  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11346  op1 = make_compound_operation (op1, SET);
11347
11348  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11349      /* Case 3 above, to sometimes allow (subreg (mem x)), isn't
11350	 implemented.  */
11351      && GET_CODE (SUBREG_REG (op0)) == REG
11352      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11353      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11354      && (code == NE || code == EQ))
11355    {
11356      if (GET_MODE_SIZE (GET_MODE (op0))
11357	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11358	{
11359	  op0 = SUBREG_REG (op0);
11360	  op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
11361	}
11362      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11363		<= HOST_BITS_PER_WIDE_INT)
11364	       && (nonzero_bits (SUBREG_REG (op0),
11365				 GET_MODE (SUBREG_REG (op0)))
11366		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11367	{
11368	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
11369
11370	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11371	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11372	    op0 = SUBREG_REG (op0), op1 = tem;
11373	}
11374    }
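  /* For example, (eq (subreg:QI Y:SI 0) (const_int 5)) becomes
     (eq Y:SI (const_int 5)) when nonzero_bits shows that Y has no bits
     set outside the low byte, so the comparison is safe in the wider
     mode.  */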
11375
11376  /* We now do the opposite procedure: Some machines don't have compare
11377     insns in all modes.  If OP0's mode is an integer mode smaller than a
11378     word and we can't do a compare in that mode, see if there is a larger
11379     mode for which we can do the compare.  There are a number of cases in
11380     which we can use the wider mode.  */
11381
11382  mode = GET_MODE (op0);
11383  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11384      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11385      && ! have_insn_for (COMPARE, mode))
11386    for (tmode = GET_MODE_WIDER_MODE (mode);
11387	 (tmode != VOIDmode
11388	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11389	 tmode = GET_MODE_WIDER_MODE (tmode))
11390      if (have_insn_for (COMPARE, tmode))
11391	{
11392	  int zero_extended;
11393
11394	  /* If the only nonzero bits in OP0 and OP1 are those in the
11395	     narrower mode and this is an equality or unsigned comparison,
11396	     we can use the wider mode.  Similarly for sign-extended
11397	     values, in which case it is true for all comparisons.  */
11398	  zero_extended = ((code == EQ || code == NE
11399			    || code == GEU || code == GTU
11400			    || code == LEU || code == LTU)
11401			   && (nonzero_bits (op0, tmode)
11402			       & ~GET_MODE_MASK (mode)) == 0
11403			   && ((GET_CODE (op1) == CONST_INT
11404				|| (nonzero_bits (op1, tmode)
11405				    & ~GET_MODE_MASK (mode)) == 0)));
11406
11407	  if (zero_extended
11408	      || ((num_sign_bit_copies (op0, tmode)
11409		   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11410				     - GET_MODE_BITSIZE (mode)))
11411		  && (num_sign_bit_copies (op1, tmode)
11412		      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11413					- GET_MODE_BITSIZE (mode)))))
11414	    {
11415	      /* If OP0 is an AND and we don't have an AND in MODE either,
11416		 make a new AND in the proper mode.  */
11417	      if (GET_CODE (op0) == AND
11418		  && !have_insn_for (AND, mode))
11419		op0 = gen_binary (AND, tmode,
11420				  gen_lowpart_for_combine (tmode,
11421							   XEXP (op0, 0)),
11422				  gen_lowpart_for_combine (tmode,
11423							   XEXP (op0, 1)));
11424
11425	      op0 = gen_lowpart_for_combine (tmode, op0);
11426	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11427		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11428	      op1 = gen_lowpart_for_combine (tmode, op1);
11429	      break;
11430	    }
11431
11432	  /* If this is a test for negative, we can make an explicit
11433	     test of the sign bit.  */
11434
11435	  if (op1 == const0_rtx && (code == LT || code == GE)
11436	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11437	    {
11438	      op0 = gen_binary (AND, tmode,
11439				gen_lowpart_for_combine (tmode, op0),
11440				GEN_INT ((HOST_WIDE_INT) 1
11441					 << (GET_MODE_BITSIZE (mode) - 1)));
11442	      code = (code == LT) ? NE : EQ;
11443	      break;
11444	    }
11445	}
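  /* For example, on a target with no HImode compare,
     (eq X:HI (const_int 3)) can be widened to an SImode comparison when
     the nonzero bits of X fit in HImode, and (lt X:HI (const_int 0))
     can become the explicit sign-bit test
     (ne (and:SI X' (const_int 0x8000)) (const_int 0)), where X' is the
     SImode low part of X.  */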
11446
11447#ifdef CANONICALIZE_COMPARISON
11448  /* If this machine only supports a subset of valid comparisons, see if we
11449     can convert an unsupported one into a supported one.  */
11450  CANONICALIZE_COMPARISON (code, op0, op1);
11451#endif
11452
11453  *pop0 = op0;
11454  *pop1 = op1;
11455
11456  return code;
11457}
11458
11459/* Like jump.c's reversed_comparison_code, but use the combine
11460   infrastructure for searching backward.  */
11461static enum rtx_code
11462combine_reversed_comparison_code (exp)
11463     rtx exp;
11464{
11465  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11466  rtx x;
11467
11468  if (code1 != UNKNOWN
11469      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11470    return code1;
11471  /* Otherwise try to find where the condition codes were last set and
11472     use that.  */
11473  x = get_last_value (XEXP (exp, 0));
11474  if (!x || GET_CODE (x) != COMPARE)
11475    return UNKNOWN;
11476  return reversed_comparison_code_parts (GET_CODE (exp),
11477					 XEXP (x, 0), XEXP (x, 1), NULL);
11478}
11479/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11480   Return NULL_RTX in case we fail to do the reversal.  */
11481static rtx
11482reversed_comparison (exp, mode, op0, op1)
11483     rtx exp, op0, op1;
11484     enum machine_mode mode;
11485{
11486  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11487  if (reversed_code == UNKNOWN)
11488    return NULL_RTX;
11489  else
11490    return gen_binary (reversed_code, mode, op0, op1);
11491}
11492
11493/* Utility function for the following routine.  Called when X is part of a value
11494   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
11495   for each register mentioned.  Similar to mention_regs in cse.c  */
11496
11497static void
11498update_table_tick (x)
11499     rtx x;
11500{
11501  enum rtx_code code = GET_CODE (x);
11502  const char *fmt = GET_RTX_FORMAT (code);
11503  int i;
11504
11505  if (code == REG)
11506    {
11507      unsigned int regno = REGNO (x);
11508      unsigned int endregno
11509	= regno + (regno < FIRST_PSEUDO_REGISTER
11510		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11511      unsigned int r;
11512
11513      for (r = regno; r < endregno; r++)
11514	reg_last_set_table_tick[r] = label_tick;
11515
11516      return;
11517    }
11518
11519  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11520    /* Note that we can't have an "E" in values stored; see
11521       get_last_value_validate.  */
11522    if (fmt[i] == 'e')
11523      {
11524	/* Check for identical subexpressions.  If x contains
11525	   identical subexpressions, we only have to traverse one of
11526	   them.  */
11527	if (i == 0
11528	    && (GET_RTX_CLASS (code) == '2'
11529		|| GET_RTX_CLASS (code) == 'c'))
11530	  {
11531	    /* Note that at this point x1 has already been
11532	       processed.  */
11533	    rtx x0 = XEXP (x, 0);
11534	    rtx x1 = XEXP (x, 1);
11535
11536	    /* If x0 and x1 are identical then there is no need to
11537	       process x0.  */
11538	    if (x0 == x1)
11539	      break;
11540
11541	    /* If x0 is identical to a subexpression of x1 then while
11542	       processing x1, x0 has already been processed.  Thus we
11543	       are done with x.  */
11544	    if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11545		 || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11546		&& (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11547	      break;
11548
11549	    /* If x1 is identical to a subexpression of x0 then we
11550	       still have to process the rest of x0.  */
11551	    if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11552		 || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11553		&& (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11554	      {
11555		update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11556		break;
11557	      }
11558	  }
11559
11560	update_table_tick (XEXP (x, i));
11561      }
11562}
11563
11564/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11565   are saying that the register is clobbered and we no longer know its
11566   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11567   with VALUE also zero and is used to invalidate the register.  */
11568
11569static void
11570record_value_for_reg (reg, insn, value)
11571     rtx reg;
11572     rtx insn;
11573     rtx value;
11574{
11575  unsigned int regno = REGNO (reg);
11576  unsigned int endregno
11577    = regno + (regno < FIRST_PSEUDO_REGISTER
11578	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11579  unsigned int i;
11580
11581  /* If VALUE contains REG and we have a previous value for REG, substitute
11582     the previous value.  */
11583  if (value && insn && reg_overlap_mentioned_p (reg, value))
11584    {
11585      rtx tem;
11586
11587      /* Set things up so get_last_value is allowed to see anything set up to
11588	 our insn.  */
11589      subst_low_cuid = INSN_CUID (insn);
11590      tem = get_last_value (reg);
11591
11592      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11593	 it isn't going to be useful and will take a lot of time to process,
11594	 so just use the CLOBBER.  */
11595
11596      if (tem)
11597	{
11598	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11599	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11600	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11601	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11602	    tem = XEXP (tem, 0);
11603
11604	  value = replace_rtx (copy_rtx (value), reg, tem);
11605	}
11606    }
11607
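  /* For example, when recording (set X (plus X 1)) and the last
     recorded value of X was (reg Y), the value stored for X becomes
     (plus (reg Y) (const_int 1)) rather than one referring to X
     itself.  */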
11608  /* For each register modified, show we don't know its value, that
11609     we don't know about its bitwise content, that its value has been
11610     updated, and that we don't know the location of the death of the
11611     register.  */
11612  for (i = regno; i < endregno; i++)
11613    {
11614      if (insn)
11615	reg_last_set[i] = insn;
11616
11617      reg_last_set_value[i] = 0;
11618      reg_last_set_mode[i] = 0;
11619      reg_last_set_nonzero_bits[i] = 0;
11620      reg_last_set_sign_bit_copies[i] = 0;
11621      reg_last_death[i] = 0;
11622    }
11623
11624  /* Mark registers that are being referenced in this value.  */
11625  if (value)
11626    update_table_tick (value);
11627
11628  /* Now update the status of each register being set.
11629     If this register is already used in a value recorded in this basic
11630     block, mark it invalid, since we would otherwise confuse its two
11631     lives within the block; every later use of the register is then
11632     treated as invalid.  In cse, we scan the table to invalidate all
11633     entries using this register, but this is too much work for us.  */
11634
11635  for (i = regno; i < endregno; i++)
11636    {
11637      reg_last_set_label[i] = label_tick;
11638      if (value && reg_last_set_table_tick[i] == label_tick)
11639	reg_last_set_invalid[i] = 1;
11640      else
11641	reg_last_set_invalid[i] = 0;
11642    }
11643
11644  /* The value being assigned might refer to X (like in "x++;").  In that
11645     case, we must replace it with (clobber (const_int 0)) to prevent
11646     infinite loops.  */
11647  if (value && ! get_last_value_validate (&value, insn,
11648					  reg_last_set_label[regno], 0))
11649    {
11650      value = copy_rtx (value);
11651      if (! get_last_value_validate (&value, insn,
11652				     reg_last_set_label[regno], 1))
11653	value = 0;
11654    }
11655
11656  /* For the main register being modified, update the value, the mode, the
11657     nonzero bits, and the number of sign bit copies.  */
11658
11659  reg_last_set_value[regno] = value;
11660
11661  if (value)
11662    {
11663      enum machine_mode mode = GET_MODE (reg);
11664      subst_low_cuid = INSN_CUID (insn);
11665      reg_last_set_mode[regno] = mode;
11666      if (GET_MODE_CLASS (mode) == MODE_INT
11667	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11668	mode = nonzero_bits_mode;
11669      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11670      reg_last_set_sign_bit_copies[regno]
11671	= num_sign_bit_copies (value, GET_MODE (reg));
11672    }
11673}
11674
11675/* Called via note_stores from record_dead_and_set_regs to handle one
11676   SET or CLOBBER in an insn.  DATA is the instruction in which the
11677   set is occurring.  */
11678
11679static void
11680record_dead_and_set_regs_1 (dest, setter, data)
11681     rtx dest, setter;
11682     void *data;
11683{
11684  rtx record_dead_insn = (rtx) data;
11685
11686  if (GET_CODE (dest) == SUBREG)
11687    dest = SUBREG_REG (dest);
11688
11689  if (GET_CODE (dest) == REG)
11690    {
11691      /* If we are setting the whole register, we know its value.  Otherwise
11692	 show that we don't know the value.  We can handle SUBREG in
11693	 some cases.  */
11694      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11695	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11696      else if (GET_CODE (setter) == SET
11697	       && GET_CODE (SET_DEST (setter)) == SUBREG
11698	       && SUBREG_REG (SET_DEST (setter)) == dest
11699	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11700	       && subreg_lowpart_p (SET_DEST (setter)))
11701	record_value_for_reg (dest, record_dead_insn,
11702			      gen_lowpart_for_combine (GET_MODE (dest),
11703						       SET_SRC (setter)));
11704      else
11705	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11706    }
11707  else if (GET_CODE (dest) == MEM
11708	   /* Ignore pushes, they clobber nothing.  */
11709	   && ! push_operand (dest, GET_MODE (dest)))
11710    mem_last_set = INSN_CUID (record_dead_insn);
11711}
11712
11713/* Update the records of when each REG was most recently set or killed
11714   for the things done by INSN.  This is the last thing done in processing
11715   INSN in the combiner loop.
11716
11717   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11718   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11719   and also the similar information mem_last_set (which insn most recently
11720   modified memory) and last_call_cuid (which insn was the most recent
11721   subroutine call).  */
11722
11723static void
11724record_dead_and_set_regs (insn)
11725     rtx insn;
11726{
11727  rtx link;
11728  unsigned int i;
11729
11730  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11731    {
11732      if (REG_NOTE_KIND (link) == REG_DEAD
11733	  && GET_CODE (XEXP (link, 0)) == REG)
11734	{
11735	  unsigned int regno = REGNO (XEXP (link, 0));
11736	  unsigned int endregno
11737	    = regno + (regno < FIRST_PSEUDO_REGISTER
11738		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11739		       : 1);
11740
11741	  for (i = regno; i < endregno; i++)
11742	    reg_last_death[i] = insn;
11743	}
11744      else if (REG_NOTE_KIND (link) == REG_INC)
11745	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11746    }
11747
11748  if (GET_CODE (insn) == CALL_INSN)
11749    {
11750      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11751	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11752	  {
11753	    reg_last_set_value[i] = 0;
11754	    reg_last_set_mode[i] = 0;
11755	    reg_last_set_nonzero_bits[i] = 0;
11756	    reg_last_set_sign_bit_copies[i] = 0;
11757	    reg_last_death[i] = 0;
11758	  }
11759
11760      last_call_cuid = mem_last_set = INSN_CUID (insn);
11761
11762      /* Don't bother recording what this insn does.  It might set the
11763	 return value register, but we can't combine into a call
11764	 pattern anyway, so there's no point trying (and it may cause
11765	 a crash, if e.g. we wind up asking for last_set_value of a
11766	 SUBREG of the return value register).  */
11767      return;
11768    }
11769
11770  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11771}
11772
11773/* If a SUBREG has the promoted bit set, it is in fact a property of the
11774   register present in the SUBREG, so for each such SUBREG go back and
11775   adjust nonzero and sign bit information of the registers that are
11776   known to have some zero/sign bits set.
11777
11778   This is needed because when combine eliminates the SUBREGs, the
11779   information on zero/sign bits is lost, and further combines can be
11780   missed as a result.  */
11781
11782static void
11783record_promoted_value (insn, subreg)
11784     rtx insn;
11785     rtx subreg;
11786{
11787  rtx links, set;
11788  unsigned int regno = REGNO (SUBREG_REG (subreg));
11789  enum machine_mode mode = GET_MODE (subreg);
11790
11791  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11792    return;
11793
11794  for (links = LOG_LINKS (insn); links;)
11795    {
11796      insn = XEXP (links, 0);
11797      set = single_set (insn);
11798
11799      if (! set || GET_CODE (SET_DEST (set)) != REG
11800	  || REGNO (SET_DEST (set)) != regno
11801	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11802	{
11803	  links = XEXP (links, 1);
11804	  continue;
11805	}
11806
11807      if (reg_last_set[regno] == insn)
11808	{
11809	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11810	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11811	}
11812
11813      if (GET_CODE (SET_SRC (set)) == REG)
11814	{
11815	  regno = REGNO (SET_SRC (set));
11816	  links = LOG_LINKS (insn);
11817	}
11818      else
11819	break;
11820    }
11821}
11822
11823/* Scan X for promoted SUBREGs.  For each one found,
11824   note what it implies to the registers used in it.  */
11825
11826static void
11827check_promoted_subreg (insn, x)
11828     rtx insn;
11829     rtx x;
11830{
11831  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11832      && GET_CODE (SUBREG_REG (x)) == REG)
11833    record_promoted_value (insn, x);
11834  else
11835    {
11836      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11837      int i, j;
11838
11839      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11840	switch (format[i])
11841	  {
11842	  case 'e':
11843	    check_promoted_subreg (insn, XEXP (x, i));
11844	    break;
11845	  case 'V':
11846	  case 'E':
11847	    if (XVEC (x, i) != 0)
11848	      for (j = 0; j < XVECLEN (x, i); j++)
11849		check_promoted_subreg (insn, XVECEXP (x, i, j));
11850	    break;
11851	  }
11852    }
11853}
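
/* check_promoted_subreg walks an expression using its rtx format string:
   'e' marks a single subexpression, 'E' a vector of them.  A standalone
   sketch of that dispatch over a toy tree follows; toy_rtx, fmt and walk
   are illustrative stand-ins, not the real rtl layout.  */
#if 0
#include <stdio.h>

struct toy_rtx
{
  const char *fmt;		/* e.g. "ee" for a binary operation */
  struct toy_rtx *op[2];	/* operands for 'e' slots */
  struct toy_rtx **vec;		/* NULL-terminated vector for an 'E' slot */
  int interesting;		/* stand-in for the promoted-SUBREG test */
};

static void
walk (struct toy_rtx *x)
{
  int i, j;

  if (x->interesting)
    {
      printf ("found one\n");	/* record_promoted_value analogue */
      return;
    }

  for (i = 0; x->fmt[i]; i++)
    if (x->fmt[i] == 'e')
      walk (x->op[i]);
    else if (x->fmt[i] == 'E' && x->vec != 0)
      for (j = 0; x->vec[j] != 0; j++)
	walk (x->vec[j]);
}

int
main (void)
{
  struct toy_rtx leaf = { "", { 0, 0 }, 0, 1 };
  struct toy_rtx top = { "ee", { &leaf, &leaf }, 0, 0 };

  walk (&top);			/* prints "found one" twice */
  return 0;
}
#endif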
11854
11855/* Utility routine for the following function.  Verify that all the registers
11856   mentioned in *LOC are valid when *LOC was part of a value set when
11857   label_tick == TICK.  Return 0 if some are not.
11858
11859   If REPLACE is nonzero, replace the invalid reference with
11860   (clobber (const_int 0)) and return 1.  This replacement is valuable because
11861   we can often extract useful information about the form of a value (e.g., if
11862   it was produced by a shift that always produces -1 or 0) even though
11863   we don't know exactly what registers it was produced from.  */
11864
11865static int
11866get_last_value_validate (loc, insn, tick, replace)
11867     rtx *loc;
11868     rtx insn;
11869     int tick;
11870     int replace;
11871{
11872  rtx x = *loc;
11873  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11874  int len = GET_RTX_LENGTH (GET_CODE (x));
11875  int i;
11876
11877  if (GET_CODE (x) == REG)
11878    {
11879      unsigned int regno = REGNO (x);
11880      unsigned int endregno
11881	= regno + (regno < FIRST_PSEUDO_REGISTER
11882		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11883      unsigned int j;
11884
11885      for (j = regno; j < endregno; j++)
11886	if (reg_last_set_invalid[j]
11887	    /* If this is a pseudo-register that was only set once and not
11888	       live at the beginning of the function, it is always valid.  */
11889	    || (! (regno >= FIRST_PSEUDO_REGISTER
11890		   && REG_N_SETS (regno) == 1
11891		   && (! REGNO_REG_SET_P
11892		       (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))
11893		&& reg_last_set_label[j] > tick))
11894	  {
11895	    if (replace)
11896	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11897	    return replace;
11898	  }
11899
11900      return 1;
11901    }
11902  /* If this is a memory reference, make sure that there were
11903     no stores after it that might have clobbered the value.  We don't
11904     have alias info, so we assume any store invalidates it.  */
11905  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11906	   && INSN_CUID (insn) <= mem_last_set)
11907    {
11908      if (replace)
11909	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11910      return replace;
11911    }
11912
11913  for (i = 0; i < len; i++)
11914    {
11915      if (fmt[i] == 'e')
11916	{
11917	  /* Check for identical subexpressions.  If x contains
11918	     identical subexpressions we only have to traverse one of
11919	     them.  */
11920	  if (i == 1
11921	      && (GET_RTX_CLASS (GET_CODE (x)) == '2'
11922		  || GET_RTX_CLASS (GET_CODE (x)) == 'c'))
11923	    {
11924	      /* Note that at this point x0 has already been checked
11925		 and found valid.  */
11926	      rtx x0 = XEXP (x, 0);
11927	      rtx x1 = XEXP (x, 1);
11928
11929	      /* If x0 and x1 are identical then x is also valid.  */
11930	      if (x0 == x1)
11931		return 1;
11932
11933	      /* If x1 is identical to a subexpression of x0 then
11934		 while checking x0, x1 has already been checked.  Thus
11935		 it is valid and so is x.  */
11936	      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
11937		   || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
11938		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11939		return 1;
11940
11941	      /* If x0 is identical to a subexpression of x1 then x is
11942		 valid iff the rest of x1 is valid.  */
11943	      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
11944		   || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
11945		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11946		return
11947		  get_last_value_validate (&XEXP (x1,
11948						  x0 == XEXP (x1, 0) ? 1 : 0),
11949					   insn, tick, replace);
11950	    }
11951
11952	  if (get_last_value_validate (&XEXP (x, i), insn, tick,
11953				       replace) == 0)
11954	    return 0;
11955	}
11956      /* Don't bother with these.  They shouldn't occur anyway.  */
11957      else if (fmt[i] == 'E')
11958	return 0;
11959    }
11960
11961  /* If we haven't found a reason for it to be invalid, it is valid.  */
11962  return 1;
11963}
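
/* The identical-subexpression checks above keep the recursion linear on
   rtl with heavy sharing (e.g. chains built from x = a + a).  Below is a
   standalone sketch of the pointer-equality shortcut alone; node and
   validate are illustrative stand-ins.  */
#if 0
struct node
{
  struct node *op0, *op1;	/* both zero for a leaf */
  int valid;
};

static int
validate (struct node *n)
{
  if (n->op0 == 0)
    return n->valid;		/* leaf */

  if (! validate (n->op0))
    return 0;

  /* A shared operand was already checked while checking op0.  */
  if (n->op1 == n->op0)
    return 1;

  return validate (n->op1);
}

int
main (void)
{
  struct node a = { 0, 0, 1 };
  struct node sum = { &a, &a, 0 };	/* a + a, operands shared */

  return validate (&sum) ? 0 : 1;
}
#endif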
11964
11965/* Get the last value assigned to X, if known.  Some registers
11966   in the value may be replaced with (clobber (const_int 0)) if their value
11967   is no longer known reliably.  */
11968
11969static rtx
11970get_last_value (x)
11971     rtx x;
11972{
11973  unsigned int regno;
11974  rtx value;
11975
11976  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11977     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11978     we cannot predict what values the "extra" bits might have.  */
11979  if (GET_CODE (x) == SUBREG
11980      && subreg_lowpart_p (x)
11981      && (GET_MODE_SIZE (GET_MODE (x))
11982	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11983      && (value = get_last_value (SUBREG_REG (x))) != 0)
11984    return gen_lowpart_for_combine (GET_MODE (x), value);
11985
11986  if (GET_CODE (x) != REG)
11987    return 0;
11988
11989  regno = REGNO (x);
11990  value = reg_last_set_value[regno];
11991
11992  /* If we don't have a value, or if it isn't for this basic block and
11993     it's either a hard register, set more than once, or it's live
11994     at the beginning of the function, return 0.
11995
11996     If it's not live at the beginning of the function, then the reg
11997     is always set before being used (it is never used without being set).
11998     And, if it's set only once, and it's always set before use, then all
11999     uses must have the same last value, even if it's not from this basic
12000     block.  */
12001
12002  if (value == 0
12003      || (reg_last_set_label[regno] != label_tick
12004	  && (regno < FIRST_PSEUDO_REGISTER
12005	      || REG_N_SETS (regno) != 1
12006	      || (REGNO_REG_SET_P
12007		  (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno)))))
12008    return 0;
12009
12010  /* If the value was set in a later insn than the ones we are processing,
12011     we can't use it even if the register was only set once.  */
12012  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
12013    return 0;
12014
12015  /* If the value has all its registers valid, return it.  */
12016  if (get_last_value_validate (&value, reg_last_set[regno],
12017			       reg_last_set_label[regno], 0))
12018    return value;
12019
12020  /* Otherwise, make a copy and replace any invalid register with
12021     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
12022
12023  value = copy_rtx (value);
12024  if (get_last_value_validate (&value, reg_last_set[regno],
12025			       reg_last_set_label[regno], 1))
12026    return value;
12027
12028  return 0;
12029}
12030
12031/* Return nonzero if expression X refers to a REG or to memory
12032   that is set in an instruction more recent than FROM_CUID.  */
12033
12034static int
12035use_crosses_set_p (x, from_cuid)
12036     rtx x;
12037     int from_cuid;
12038{
12039  const char *fmt;
12040  int i;
12041  enum rtx_code code = GET_CODE (x);
12042
12043  if (code == REG)
12044    {
12045      unsigned int regno = REGNO (x);
12046      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
12047				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
12048
12049#ifdef PUSH_ROUNDING
12050      /* Don't allow uses of the stack pointer to be moved,
12051	 because we don't know whether the move crosses a push insn.  */
12052      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
12053	return 1;
12054#endif
12055      for (; regno < endreg; regno++)
12056	if (reg_last_set[regno]
12057	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
12058	  return 1;
12059      return 0;
12060    }
12061
12062  if (code == MEM && mem_last_set > from_cuid)
12063    return 1;
12064
12065  fmt = GET_RTX_FORMAT (code);
12066
12067  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12068    {
12069      if (fmt[i] == 'E')
12070	{
12071	  int j;
12072	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12073	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
12074	      return 1;
12075	}
12076      else if (fmt[i] == 'e'
12077	       && use_crosses_set_p (XEXP (x, i), from_cuid))
12078	return 1;
12079    }
12080  return 0;
12081}
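
/* A standalone sketch of the cuid test above: a use of a register
   crosses a set exactly when the register's most recent set is later
   than FROM_CUID in the insn ordering.  last_set_cuid is an
   illustrative stand-in for reg_last_set.  */
#if 0
static int last_set_cuid[64];	/* 0 means never set */

static int
use_crosses_set (unsigned int regno, int from_cuid)
{
  return last_set_cuid[regno] > from_cuid;
}

int
main (void)
{
  last_set_cuid[3] = 40;
  return use_crosses_set (3, 30) && ! use_crosses_set (3, 45) ? 0 : 1;
}
#endif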
12082
12083/* Define three variables used for communication between the following
12084   routines.  */
12085
12086static unsigned int reg_dead_regno, reg_dead_endregno;
12087static int reg_dead_flag;
12088
12089/* Function called via note_stores from reg_dead_at_p.
12090
12091   If DEST is within [reg_dead_regno, reg_dead_endregno), set
12092   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
12093
12094static void
12095reg_dead_at_p_1 (dest, x, data)
12096     rtx dest;
12097     rtx x;
12098     void *data ATTRIBUTE_UNUSED;
12099{
12100  unsigned int regno, endregno;
12101
12102  if (GET_CODE (dest) != REG)
12103    return;
12104
12105  regno = REGNO (dest);
12106  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
12107		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
12108
12109  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12110    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12111}
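
/* The test above is the standard intersection test for half-open
   ranges: [a0, a1) and [b0, b1) overlap exactly when a1 > b0 and
   a0 < b1.  A standalone sketch:  */
#if 0
#include <assert.h>

static int
overlaps (unsigned int a0, unsigned int a1, unsigned int b0, unsigned int b1)
{
  return a1 > b0 && a0 < b1;
}

int
main (void)
{
  assert (overlaps (10, 12, 11, 13));	/* share register 11 */
  assert (! overlaps (10, 12, 12, 14));	/* adjacent but disjoint */
  return 0;
}
#endif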
12112
12113/* Return nonzero if REG is known to be dead at INSN.
12114
12115   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12116   referencing REG, it is dead.  If we hit a SET referencing REG, it is
12117   live.  Otherwise, see if it is live or dead at the start of the basic
12118   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12119   must be assumed to be always live.  */
12120
12121static int
12122reg_dead_at_p (reg, insn)
12123     rtx reg;
12124     rtx insn;
12125{
12126  basic_block block;
12127  unsigned int i;
12128
12129  /* Set variables for reg_dead_at_p_1.  */
12130  reg_dead_regno = REGNO (reg);
12131  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
12132					? HARD_REGNO_NREGS (reg_dead_regno,
12133							    GET_MODE (reg))
12134					: 1);
12135
12136  reg_dead_flag = 0;
12137
12138  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
12139  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12140    {
12141      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12142	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
12143	  return 0;
12144    }
12145
12146  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
12147     beginning of function.  */
12148  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
12149       insn = prev_nonnote_insn (insn))
12150    {
12151      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12152      if (reg_dead_flag)
12153	return reg_dead_flag == 1 ? 1 : 0;
12154
12155      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12156	return 1;
12157    }
12158
12159  /* Get the basic block that we were in.  */
12160  if (insn == 0)
12161    block = ENTRY_BLOCK_PTR->next_bb;
12162  else
12163    {
12164      FOR_EACH_BB (block)
12165	if (insn == block->head)
12166	  break;
12167
12168      if (block == EXIT_BLOCK_PTR)
12169	return 0;
12170    }
12171
12172  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12173    if (REGNO_REG_SET_P (block->global_live_at_start, i))
12174      return 0;
12175
12176  return 1;
12177}
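
/* A standalone sketch of the backward scan above: the first event that
   mentions the register decides its liveness (a clobber or a death note
   means dead, a set means live), and if nothing in the block decides,
   block-entry liveness is the fallback.  The event array is an
   illustrative stand-in for the insn chain.  */
#if 0
enum reg_event { IRRELEVANT, SETS_REG, CLOBBERS_REG, HAS_DEATH_NOTE };

static int
dead_at (const enum reg_event *ev, int pos, int live_at_block_start)
{
  int i;

  for (i = pos; i >= 0; i--)
    switch (ev[i])
      {
      case CLOBBERS_REG:
      case HAS_DEATH_NOTE:
	return 1;		/* known dead */
      case SETS_REG:
	return 0;		/* known live */
      default:
	break;
      }

  return ! live_at_block_start;
}

int
main (void)
{
  enum reg_event ev[3] = { SETS_REG, IRRELEVANT, IRRELEVANT };

  return dead_at (ev, 2, 0) == 0 ? 0 : 1;	/* the set makes it live */
}
#endif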
12178
12179/* Note hard registers in X that are used.  This code is similar to
12180   that in flow.c, but much simpler since we don't care about pseudos.  */
12181
12182static void
12183mark_used_regs_combine (x)
12184     rtx x;
12185{
12186  RTX_CODE code = GET_CODE (x);
12187  unsigned int regno;
12188  int i;
12189
12190  switch (code)
12191    {
12192    case LABEL_REF:
12193    case SYMBOL_REF:
12194    case CONST_INT:
12195    case CONST:
12196    case CONST_DOUBLE:
12197    case CONST_VECTOR:
12198    case PC:
12199    case ADDR_VEC:
12200    case ADDR_DIFF_VEC:
12201    case ASM_INPUT:
12202#ifdef HAVE_cc0
12203    /* CC0 must die in the insn after it is set, so we don't need to take
12204       special note of it here.  */
12205    case CC0:
12206#endif
12207      return;
12208
12209    case CLOBBER:
12210      /* If we are clobbering a MEM, mark any hard registers inside the
12211	 address as used.  */
12212      if (GET_CODE (XEXP (x, 0)) == MEM)
12213	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12214      return;
12215
12216    case REG:
12217      regno = REGNO (x);
12218      /* A hard reg in a wide mode may really be multiple registers.
12219	 If so, mark all of them just like the first.  */
12220      if (regno < FIRST_PSEUDO_REGISTER)
12221	{
12222	  unsigned int endregno, r;
12223
12224	  /* None of this applies to the stack, frame or arg pointers.  */
12225	  if (regno == STACK_POINTER_REGNUM
12226#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12227	      || regno == HARD_FRAME_POINTER_REGNUM
12228#endif
12229#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12230	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12231#endif
12232	      || regno == FRAME_POINTER_REGNUM)
12233	    return;
12234
12235	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12236	  for (r = regno; r < endregno; r++)
12237	    SET_HARD_REG_BIT (newpat_used_regs, r);
12238	}
12239      return;
12240
12241    case SET:
12242      {
12243	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12244	   the address.  */
12245	rtx testreg = SET_DEST (x);
12246
12247	while (GET_CODE (testreg) == SUBREG
12248	       || GET_CODE (testreg) == ZERO_EXTRACT
12249	       || GET_CODE (testreg) == SIGN_EXTRACT
12250	       || GET_CODE (testreg) == STRICT_LOW_PART)
12251	  testreg = XEXP (testreg, 0);
12252
12253	if (GET_CODE (testreg) == MEM)
12254	  mark_used_regs_combine (XEXP (testreg, 0));
12255
12256	mark_used_regs_combine (SET_SRC (x));
12257      }
12258      return;
12259
12260    default:
12261      break;
12262    }
12263
12264  /* Recursively scan the operands of this expression.  */
12265
12266  {
12267    const char *fmt = GET_RTX_FORMAT (code);
12268
12269    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12270      {
12271	if (fmt[i] == 'e')
12272	  mark_used_regs_combine (XEXP (x, i));
12273	else if (fmt[i] == 'E')
12274	  {
12275	    int j;
12276
12277	    for (j = 0; j < XVECLEN (x, i); j++)
12278	      mark_used_regs_combine (XVECEXP (x, i, j));
12279	  }
12280      }
12281  }
12282}
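
/* The SET case above peels wrappers (SUBREG, ZERO_EXTRACT, SIGN_EXTRACT,
   STRICT_LOW_PART) off the destination until the underlying REG or MEM
   is exposed.  A standalone sketch with a toy code enum standing in for
   the real rtx codes:  */
#if 0
enum toy_code { TOY_REG, TOY_MEM, TOY_SUBREG, TOY_STRICT_LOW_PART };

struct toy_dest
{
  enum toy_code code;
  struct toy_dest *inner;	/* operand 0 of a wrapper */
};

static struct toy_dest *
strip_dest_wrappers (struct toy_dest *x)
{
  while (x->code == TOY_SUBREG || x->code == TOY_STRICT_LOW_PART)
    x = x->inner;
  return x;			/* now TOY_REG or TOY_MEM */
}

int
main (void)
{
  struct toy_dest reg = { TOY_REG, 0 };
  struct toy_dest sub = { TOY_SUBREG, &reg };

  return strip_dest_wrappers (&sub) == &reg ? 0 : 1;
}
#endif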
12283
12284/* Remove register number REGNO from the dead registers list of INSN.
12285
12286   Return the note used to record the death, if there was one.  */
12287
12288rtx
12289remove_death (regno, insn)
12290     unsigned int regno;
12291     rtx insn;
12292{
12293  rtx note = find_regno_note (insn, REG_DEAD, regno);
12294
12295  if (note)
12296    {
12297      REG_N_DEATHS (regno)--;
12298      remove_note (insn, note);
12299    }
12300
12301  return note;
12302}
12303
12304/* For each register (hardware or pseudo) used within expression X, if its
12305   death is in an instruction with cuid between FROM_CUID (inclusive) and
12306   TO_INSN (exclusive), put a REG_DEAD note for that register in the
12307   list headed by PNOTES.
12308
12309   That said, don't move registers killed by maybe_kill_insn.
12310
12311   This is done when X is being merged by combination into TO_INSN.  These
12312   notes will then be distributed as needed.  */
12313
12314static void
12315move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
12316     rtx x;
12317     rtx maybe_kill_insn;
12318     int from_cuid;
12319     rtx to_insn;
12320     rtx *pnotes;
12321{
12322  const char *fmt;
12323  int len, i;
12324  enum rtx_code code = GET_CODE (x);
12325
12326  if (code == REG)
12327    {
12328      unsigned int regno = REGNO (x);
12329      rtx where_dead = reg_last_death[regno];
12330      rtx before_dead, after_dead;
12331
12332      /* Don't move the register if it gets killed in between from and to.  */
12333      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12334	  && ! reg_referenced_p (x, maybe_kill_insn))
12335	return;
12336
12337      /* WHERE_DEAD could be a USE insn made by combine, so first we
12338	 make sure that we have insns with valid INSN_CUID values.  */
12339      before_dead = where_dead;
12340      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
12341	before_dead = PREV_INSN (before_dead);
12342
12343      after_dead = where_dead;
12344      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
12345	after_dead = NEXT_INSN (after_dead);
12346
12347      if (before_dead && after_dead
12348	  && INSN_CUID (before_dead) >= from_cuid
12349	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
12350	      || (where_dead != after_dead
12351		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
12352	{
12353	  rtx note = remove_death (regno, where_dead);
12354
12355	  /* It is possible for the call above to return 0.  This can occur
12356	     when reg_last_death points to I2 or I1 that we combined with.
12357	     In that case make a new note.
12358
12359	     We must also check for the case where X is a hard register
12360	     and NOTE is a death note for a range of hard registers
12361	     including X.  In that case, we must put REG_DEAD notes for
12362	     the remaining registers in place of NOTE.  */
12363
12364	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12365	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12366		  > GET_MODE_SIZE (GET_MODE (x))))
12367	    {
12368	      unsigned int deadregno = REGNO (XEXP (note, 0));
12369	      unsigned int deadend
12370		= (deadregno + HARD_REGNO_NREGS (deadregno,
12371						 GET_MODE (XEXP (note, 0))));
12372	      unsigned int ourend
12373		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12374	      unsigned int i;
12375
12376	      for (i = deadregno; i < deadend; i++)
12377		if (i < regno || i >= ourend)
12378		  REG_NOTES (where_dead)
12379		    = gen_rtx_EXPR_LIST (REG_DEAD,
12380					 regno_reg_rtx[i],
12381					 REG_NOTES (where_dead));
12382	    }
12383
12384	  /* If we didn't find any note, or if we found a REG_DEAD note that
12385	     covers only part of the given reg, and we have a multi-reg hard
12386	     register, then to be safe we must check for REG_DEAD notes
12387	     for each register other than the first.  They could have
12388	     their own REG_DEAD notes lying around.  */
12389	  else if ((note == 0
12390		    || (note != 0
12391			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12392			    < GET_MODE_SIZE (GET_MODE (x)))))
12393		   && regno < FIRST_PSEUDO_REGISTER
12394		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
12395	    {
12396	      unsigned int ourend
12397		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12398	      unsigned int i, offset;
12399	      rtx oldnotes = 0;
12400
12401	      if (note)
12402		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
12403	      else
12404		offset = 1;
12405
12406	      for (i = regno + offset; i < ourend; i++)
12407		move_deaths (regno_reg_rtx[i],
12408			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
12409	    }
12410
12411	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12412	    {
12413	      XEXP (note, 1) = *pnotes;
12414	      *pnotes = note;
12415	    }
12416	  else
12417	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
12418
12419	  REG_N_DEATHS (regno)++;
12420	}
12421
12422      return;
12423    }
12424
12425  else if (GET_CODE (x) == SET)
12426    {
12427      rtx dest = SET_DEST (x);
12428
12429      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
12430
12431      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12432	 that accesses one word of a multi-word item, some
12433		 piece of every register in the expression is used by
12434	 this insn, so remove any old death.  */
12435      /* ??? So why do we test for equality of the sizes?  */
12436
12437      if (GET_CODE (dest) == ZERO_EXTRACT
12438	  || GET_CODE (dest) == STRICT_LOW_PART
12439	  || (GET_CODE (dest) == SUBREG
12440	      && (((GET_MODE_SIZE (GET_MODE (dest))
12441		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12442		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12443		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12444	{
12445	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
12446	  return;
12447	}
12448
12449      /* If this is some other SUBREG, we know it replaces the entire
12450	 value, so use that as the destination.  */
12451      if (GET_CODE (dest) == SUBREG)
12452	dest = SUBREG_REG (dest);
12453
12454      /* If this is a MEM, adjust deaths of anything used in the address.
12455	 For a REG (the only other possibility), the entire value is
12456	 being replaced so the old value is not used in this insn.  */
12457
12458      if (GET_CODE (dest) == MEM)
12459	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12460		     to_insn, pnotes);
12461      return;
12462    }
12463
12464  else if (GET_CODE (x) == CLOBBER)
12465    return;
12466
12467  len = GET_RTX_LENGTH (code);
12468  fmt = GET_RTX_FORMAT (code);
12469
12470  for (i = 0; i < len; i++)
12471    {
12472      if (fmt[i] == 'E')
12473	{
12474	  int j;
12475	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12476	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12477			 to_insn, pnotes);
12478	}
12479      else if (fmt[i] == 'e')
12480	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12481    }
12482}
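
/* When an old REG_DEAD note covers a wider hard-reg range than the
   reference X, the pieces outside X's range keep their own death notes.
   A standalone sketch of that range arithmetic (the ranges are
   half-open, as everywhere above):  */
#if 0
#include <stdio.h>

static void
leftover_pieces (unsigned int deadregno, unsigned int deadend,
		 unsigned int regno, unsigned int ourend)
{
  unsigned int i;

  for (i = deadregno; i < deadend; i++)
    if (i < regno || i >= ourend)
      printf ("REG_DEAD still needed for hard reg %u\n", i);
}

int
main (void)
{
  /* The note covered regs 4..7 but X uses only 5..6, so regs 4 and 7
     must keep death notes of their own.  */
  leftover_pieces (4, 8, 5, 7);
  return 0;
}
#endif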
12483
12484/* Return 1 if X is the target of a bit-field assignment in BODY, the
12485   pattern of an insn.  X must be a REG.  */
12486
12487static int
12488reg_bitfield_target_p (x, body)
12489     rtx x;
12490     rtx body;
12491{
12492  int i;
12493
12494  if (GET_CODE (body) == SET)
12495    {
12496      rtx dest = SET_DEST (body);
12497      rtx target;
12498      unsigned int regno, tregno, endregno, endtregno;
12499
12500      if (GET_CODE (dest) == ZERO_EXTRACT)
12501	target = XEXP (dest, 0);
12502      else if (GET_CODE (dest) == STRICT_LOW_PART)
12503	target = SUBREG_REG (XEXP (dest, 0));
12504      else
12505	return 0;
12506
12507      if (GET_CODE (target) == SUBREG)
12508	target = SUBREG_REG (target);
12509
12510      if (GET_CODE (target) != REG)
12511	return 0;
12512
12513      tregno = REGNO (target), regno = REGNO (x);
12514      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12515	return target == x;
12516
12517      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12518      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12519
12520      return endregno > tregno && regno < endtregno;
12521    }
12522
12523  else if (GET_CODE (body) == PARALLEL)
12524    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12525      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12526	return 1;
12527
12528  return 0;
12529}
12530
12531/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12532   as appropriate.  I3 and I2 are the insns resulting from the combination
12533   insns including FROM (I2 may be zero).
12534
12535   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12536   not need REG_DEAD notes because they are being substituted for.  This
12537   saves searching in the most common cases.
12538
12539   Each note in the list is either ignored or placed on some insns, depending
12540   on the type of note.  */
12541
12542static void
12543distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
12544     rtx notes;
12545     rtx from_insn;
12546     rtx i3, i2;
12547     rtx elim_i2, elim_i1;
12548{
12549  rtx note, next_note;
12550  rtx tem;
12551
12552  for (note = notes; note; note = next_note)
12553    {
12554      rtx place = 0, place2 = 0;
12555
12556      /* If this NOTE references a pseudo register, ensure it references
12557	 the latest copy of that register.  */
12558      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12559	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12560	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12561
12562      next_note = XEXP (note, 1);
12563      switch (REG_NOTE_KIND (note))
12564	{
12565	case REG_BR_PROB:
12566	case REG_BR_PRED:
12567	  /* Doesn't matter much where we put this, as long as it's somewhere.
12568	     It is preferable to keep these notes on branches, which is most
12569	     likely to be i3.  */
12570	  place = i3;
12571	  break;
12572
12573	case REG_VTABLE_REF:
12574	  /* ??? Should remain with *a particular* memory load.  Given the
12575	     nature of vtable data, the last insn seems relatively safe.  */
12576	  place = i3;
12577	  break;
12578
12579	case REG_NON_LOCAL_GOTO:
12580	  if (GET_CODE (i3) == JUMP_INSN)
12581	    place = i3;
12582	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
12583	    place = i2;
12584	  else
12585	    abort ();
12586	  break;
12587
12588	case REG_EH_REGION:
12589	  /* These notes must remain with the call or trapping instruction.  */
12590	  if (GET_CODE (i3) == CALL_INSN)
12591	    place = i3;
12592	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12593	    place = i2;
12594	  else if (flag_non_call_exceptions)
12595	    {
12596	      if (may_trap_p (i3))
12597		place = i3;
12598	      else if (i2 && may_trap_p (i2))
12599		place = i2;
12600	      /* ??? Otherwise assume we've combined things such that we
12601		 can now prove that the instructions can't trap.  Drop the
12602		 note in this case.  */
12603	    }
12604	  else
12605	    abort ();
12606	  break;
12607
12608	case REG_ALWAYS_RETURN:
12609	case REG_NORETURN:
12610	case REG_SETJMP:
12611	  /* These notes must remain with the call.  It should not be
12612	     possible for both I2 and I3 to be a call.  */
12613	  if (GET_CODE (i3) == CALL_INSN)
12614	    place = i3;
12615	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12616	    place = i2;
12617	  else
12618	    abort ();
12619	  break;
12620
12621	case REG_UNUSED:
12622	  /* Any clobbers for i3 may still exist, and so we must process
12623	     REG_UNUSED notes from that insn.
12624
12625	     Any clobbers from i2 or i1 can only exist if they were added by
12626	     recog_for_combine.  In that case, recog_for_combine created the
12627	     necessary REG_UNUSED notes.  Trying to keep any original
12628	     REG_UNUSED notes from these insns can cause incorrect output
12629	     if it is for the same register as the original i3 dest.
12630	     In that case, we will notice that the register is set in i3,
12631	     and then add a REG_UNUSED note for the destination of i3, which
12632	     is wrong.  However, it is possible to have REG_UNUSED notes from
12633     i2 or i1 for registers which were both used and clobbered, so
12634	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12635	     notes.  */
12636
12637	  /* If this register is set or clobbered in I3, put the note there
12638	     unless there is one already.  */
12639	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12640	    {
12641	      if (from_insn != i3)
12642		break;
12643
12644	      if (! (GET_CODE (XEXP (note, 0)) == REG
12645		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12646		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12647		place = i3;
12648	    }
12649	  /* Otherwise, if this register is used by I3, then this register
12650	     now dies here, so we must put a REG_DEAD note here unless there
12651	     is one already.  */
12652	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12653		   && ! (GET_CODE (XEXP (note, 0)) == REG
12654			 ? find_regno_note (i3, REG_DEAD,
12655					    REGNO (XEXP (note, 0)))
12656			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12657	    {
12658	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12659	      place = i3;
12660	    }
12661	  break;
12662
12663	case REG_EQUAL:
12664	case REG_EQUIV:
12665	case REG_NOALIAS:
12666	  /* These notes say something about results of an insn.  We can
12667	     only support them if they used to be on I3 in which case they
12668	     remain on I3.  Otherwise they are ignored.
12669
12670	     If the note refers to an expression that is not a constant, we
12671	     must also ignore the note since we cannot tell whether the
12672	     equivalence is still true.  It might be possible to do
12673	     slightly better than this (we only have a problem if I2DEST
12674	     or I1DEST is present in the expression), but it doesn't
12675	     seem worth the trouble.  */
12676
12677	  if (from_insn == i3
12678	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12679	    place = i3;
12680	  break;
12681
12682	case REG_INC:
12683	case REG_NO_CONFLICT:
12684	  /* These notes say something about how a register is used.  They must
12685	     be present on any use of the register in I2 or I3.  */
12686	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12687	    place = i3;
12688
12689	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12690	    {
12691	      if (place)
12692		place2 = i2;
12693	      else
12694		place = i2;
12695	    }
12696	  break;
12697
12698	case REG_LABEL:
12699	  /* This can show up in several ways -- either directly in the
12700	     pattern, or hidden off in the constant pool with (or without?)
12701	     a REG_EQUAL note.  */
12702	  /* ??? Ignore the without-reg_equal-note problem for now.  */
12703	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12704	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12705		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12706		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12707	    place = i3;
12708
12709	  if (i2
12710	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
12711		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
12712		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12713		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12714	    {
12715	      if (place)
12716		place2 = i2;
12717	      else
12718		place = i2;
12719	    }
12720
12721	  /* Don't attach REG_LABEL note to a JUMP_INSN which has
12722	     JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
12723	  if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
12724	    {
12725	      if (JUMP_LABEL (place) != XEXP (note, 0))
12726		abort ();
12727	      if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
12728		LABEL_NUSES (JUMP_LABEL (place))--;
12729	      place = 0;
12730	    }
12731	  if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
12732	    {
12733	      if (JUMP_LABEL (place2) != XEXP (note, 0))
12734		abort ();
12735	      if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
12736		LABEL_NUSES (JUMP_LABEL (place2))--;
12737	      place2 = 0;
12738	    }
12739	  break;
12740
12741	case REG_NONNEG:
12742	case REG_WAS_0:
12743	  /* These notes say something about the value of a register prior
12744	     to the execution of an insn.  It is too much trouble to see
12745	     if the note is still correct in all situations.  It is better
12746	     to simply delete it.  */
12747	  break;
12748
12749	case REG_RETVAL:
12750	  /* If the insn previously containing this note still exists,
12751	     put it back where it was.  Otherwise move it to the previous
12752	     insn.  Adjust the corresponding REG_LIBCALL note.  */
12753	  if (GET_CODE (from_insn) != NOTE)
12754	    place = from_insn;
12755	  else
12756	    {
12757	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
12758	      place = prev_real_insn (from_insn);
12759	      if (tem && place)
12760		XEXP (tem, 0) = place;
12761	      /* If we're deleting the last remaining instruction of a
12762		 libcall sequence, don't add the notes.  */
12763	      else if (XEXP (note, 0) == from_insn)
12764		tem = place = 0;
12765	    }
12766	  break;
12767
12768	case REG_LIBCALL:
12769	  /* This is handled similarly to REG_RETVAL.  */
12770	  if (GET_CODE (from_insn) != NOTE)
12771	    place = from_insn;
12772	  else
12773	    {
12774	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
12775	      place = next_real_insn (from_insn);
12776	      if (tem && place)
12777		XEXP (tem, 0) = place;
12778	      /* If we're deleting the last remaining instruction of a
12779		 libcall sequence, don't add the notes.  */
12780	      else if (XEXP (note, 0) == from_insn)
12781		tem = place = 0;
12782	    }
12783	  break;
12784
12785	case REG_DEAD:
12786	  /* If the register is used as an input in I3, it dies there.
12787	     Similarly for I2, if it is nonzero and adjacent to I3.
12788
12789	     If the register is not used as an input in either I3 or I2
12790	     and it is not one of the registers we were supposed to eliminate,
12791	     there are two possibilities.  We might have a non-adjacent I2
12792	     or we might have somehow eliminated an additional register
12793	     from a computation.  For example, we might have had A & B where
12794	     we discover that B will always be zero.  In this case we will
12795	     eliminate the reference to A.
12796
12797	     In both cases, we must search to see if we can find a previous
12798	     use of A and put the death note there.  */
12799
12800	  if (from_insn
12801	      && GET_CODE (from_insn) == CALL_INSN
12802	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12803	    place = from_insn;
12804	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12805	    place = i3;
12806	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
12807		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12808	    place = i2;
12809
12810	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
12811	      || rtx_equal_p (XEXP (note, 0), elim_i1))
12812	    break;
12813
12814	  if (place == 0)
12815	    {
12816	      basic_block bb = this_basic_block;
12817
12818	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
12819		{
12820		  if (! INSN_P (tem))
12821		    {
12822		      if (tem == bb->head)
12823			break;
12824		      continue;
12825		    }
12826
12827		  /* If the register is being set at TEM, see if that is all
12828		     TEM is doing.  If so, delete TEM.  Otherwise, make this
12829		     into a REG_UNUSED note instead.  */
12830		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
12831		    {
12832		      rtx set = single_set (tem);
12833		      rtx inner_dest = 0;
12834#ifdef HAVE_cc0
12835		      rtx cc0_setter = NULL_RTX;
12836#endif
12837
12838		      if (set != 0)
12839			for (inner_dest = SET_DEST (set);
12840			     (GET_CODE (inner_dest) == STRICT_LOW_PART
12841			      || GET_CODE (inner_dest) == SUBREG
12842			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
12843			     inner_dest = XEXP (inner_dest, 0))
12844			  ;
12845
12846		      /* Verify that it was the set, and not a clobber that
12847			 modified the register.
12848
12849			 CC0 targets must be careful to maintain setter/user
12850			 pairs.  If we cannot delete the setter due to side
12851			 effects, mark the user with an UNUSED note instead
12852			 of deleting it.  */
12853
12854		      if (set != 0 && ! side_effects_p (SET_SRC (set))
12855			  && rtx_equal_p (XEXP (note, 0), inner_dest)
12856#ifdef HAVE_cc0
12857			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
12858			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
12859				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
12860#endif
12861			  )
12862			{
12863			  /* Move the notes and links of TEM elsewhere.
12864			     This might delete other dead insns recursively.
12865			     First set the pattern to something that won't use
12866			     any register.  */
12867
12868			  PATTERN (tem) = pc_rtx;
12869
12870			  distribute_notes (REG_NOTES (tem), tem, tem,
12871					    NULL_RTX, NULL_RTX, NULL_RTX);
12872			  distribute_links (LOG_LINKS (tem));
12873
12874			  PUT_CODE (tem, NOTE);
12875			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
12876			  NOTE_SOURCE_FILE (tem) = 0;
12877
12878#ifdef HAVE_cc0
12879			  /* Delete the setter too.  */
12880			  if (cc0_setter)
12881			    {
12882			      PATTERN (cc0_setter) = pc_rtx;
12883
12884			      distribute_notes (REG_NOTES (cc0_setter),
12885						cc0_setter, cc0_setter,
12886						NULL_RTX, NULL_RTX, NULL_RTX);
12887			      distribute_links (LOG_LINKS (cc0_setter));
12888
12889			      PUT_CODE (cc0_setter, NOTE);
12890			      NOTE_LINE_NUMBER (cc0_setter)
12891				= NOTE_INSN_DELETED;
12892			      NOTE_SOURCE_FILE (cc0_setter) = 0;
12893			    }
12894#endif
12895			}
12896		      /* If the register is both set and used here, put the
12897			 REG_DEAD note here, but place a REG_UNUSED note
12898			 here too unless there already is one.  */
12899		      else if (reg_referenced_p (XEXP (note, 0),
12900						 PATTERN (tem)))
12901			{
12902			  place = tem;
12903
12904			  if (! find_regno_note (tem, REG_UNUSED,
12905						 REGNO (XEXP (note, 0))))
12906			    REG_NOTES (tem)
12907			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
12908						   REG_NOTES (tem));
12909			}
12910		      else
12911			{
12912			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
12913
12914			  /*  If there isn't already a REG_UNUSED note, put one
12915			      here.  */
12916			  if (! find_regno_note (tem, REG_UNUSED,
12917						 REGNO (XEXP (note, 0))))
12918			    place = tem;
12919			  break;
12920			}
12921		    }
12922		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
12923			   || (GET_CODE (tem) == CALL_INSN
12924			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
12925		    {
12926		      place = tem;
12927
12928		      /* If we are doing a 3->2 combination, and we have a
12929			 register which formerly died in i3 and was not used
12930			 by i2, which now no longer dies in i3 and is used in
12931			 i2 but does not die in i2, and place is between i2
12932			 and i3, then we may need to move a link from place to
12933			 i2.  */
12934		      if (i2 && INSN_UID (place) <= max_uid_cuid
12935			  && INSN_CUID (place) > INSN_CUID (i2)
12936			  && from_insn
12937			  && INSN_CUID (from_insn) > INSN_CUID (i2)
12938			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12939			{
12940			  rtx links = LOG_LINKS (place);
12941			  LOG_LINKS (place) = 0;
12942			  distribute_links (links);
12943			}
12944		      break;
12945		    }
12946
12947		  if (tem == bb->head)
12948		    break;
12949		}
12950
12951	      /* We haven't found an insn for the death note and it
12952		 is still a REG_DEAD note, but we have hit the beginning
12953		 of the block.  If the existing life info says the reg
12954		 was dead, there's nothing left to do.  Otherwise, we'll
12955		 need to do a global life update after combine.  */
12956	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
12957		  && REGNO_REG_SET_P (bb->global_live_at_start,
12958				      REGNO (XEXP (note, 0))))
12959		{
12960		  SET_BIT (refresh_blocks, this_basic_block->index);
12961		  need_refresh = 1;
12962		}
12963	    }
12964
12965	  /* If the register is set or already dead at PLACE, we needn't do
12966	     anything with this note if it is still a REG_DEAD note.
12967     We check here if it is set at all, not if it is totally replaced,
12968     which is what `dead_or_set_p' checks, so we also check for it being
12969     set partially.  */
12970
12971	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
12972	    {
12973	      unsigned int regno = REGNO (XEXP (note, 0));
12974
12975	      /* Similarly, if the instruction on which we want to place
12976		 the note is a noop, we'll need to do a global live update
12977		 after it is removed in delete_noop_moves.  */
12978	      if (noop_move_p (place))
12979		{
12980		  SET_BIT (refresh_blocks, this_basic_block->index);
12981		  need_refresh = 1;
12982		}
12983
12984	      if (dead_or_set_p (place, XEXP (note, 0))
12985		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
12986		{
12987		  /* Unless the register previously died in PLACE, clear
12988		     reg_last_death.  [I no longer understand why this is
12989		     being done.] */
12990		  if (reg_last_death[regno] != place)
12991		    reg_last_death[regno] = 0;
12992		  place = 0;
12993		}
12994	      else
12995		reg_last_death[regno] = place;
12996
12997	      /* If this is a death note for a hard reg that is occupying
12998		 multiple registers, ensure that we are still using all
12999		 parts of the object.  If we find a piece of the object
13000		 that is unused, we must arrange for an appropriate REG_DEAD
13001		 note to be added for it.  However, we can't just emit a USE
13002		 and tag the note to it, since the register might actually
13003		 be dead; so we recurse, and the recursive call then finds
13004		 the previous insn that used this register.  */
13005
13006	      if (place && regno < FIRST_PSEUDO_REGISTER
13007		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
13008		{
13009		  unsigned int endregno
13010		    = regno + HARD_REGNO_NREGS (regno,
13011						GET_MODE (XEXP (note, 0)));
13012		  int all_used = 1;
13013		  unsigned int i;
13014
13015		  for (i = regno; i < endregno; i++)
13016		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
13017			 && ! find_regno_fusage (place, USE, i))
13018			|| dead_or_set_regno_p (place, i))
13019		      all_used = 0;
13020
13021		  if (! all_used)
13022		    {
13023		      /* Put only REG_DEAD notes for pieces that are
13024			 not already dead or set.  */
13025
13026		      for (i = regno; i < endregno;
13027			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
13028			{
13029			  rtx piece = regno_reg_rtx[i];
13030			  basic_block bb = this_basic_block;
13031
13032			  if (! dead_or_set_p (place, piece)
13033			      && ! reg_bitfield_target_p (piece,
13034							  PATTERN (place)))
13035			    {
13036			      rtx new_note
13037				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);
13038
13039			      distribute_notes (new_note, place, place,
13040						NULL_RTX, NULL_RTX, NULL_RTX);
13041			    }
13042			  else if (! refers_to_regno_p (i, i + 1,
13043							PATTERN (place), 0)
13044				   && ! find_regno_fusage (place, USE, i))
13045			    for (tem = PREV_INSN (place); ;
13046				 tem = PREV_INSN (tem))
13047			      {
13048				if (! INSN_P (tem))
13049				  {
13050				    if (tem == bb->head)
13051				      {
13052					SET_BIT (refresh_blocks,
13053						 this_basic_block->index);
13054					need_refresh = 1;
13055					break;
13056				      }
13057				    continue;
13058				  }
13059				if (dead_or_set_p (tem, piece)
13060				    || reg_bitfield_target_p (piece,
13061							      PATTERN (tem)))
13062				  {
13063				    REG_NOTES (tem)
13064				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
13065							   REG_NOTES (tem));
13066				    break;
13067				  }
13068			      }
13069
13070			}
13071
13072		      place = 0;
13073		    }
13074		}
13075	    }
13076	  break;
13077
13078	default:
13079	  /* Any other notes should not be present at this point in the
13080	     compilation.  */
13081	  abort ();
13082	}
13083
13084      if (place)
13085	{
13086	  XEXP (note, 1) = REG_NOTES (place);
13087	  REG_NOTES (place) = note;
13088	}
13089      else if ((REG_NOTE_KIND (note) == REG_DEAD
13090		|| REG_NOTE_KIND (note) == REG_UNUSED)
13091	       && GET_CODE (XEXP (note, 0)) == REG)
13092	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
13093
13094      if (place2)
13095	{
13096	  if ((REG_NOTE_KIND (note) == REG_DEAD
13097	       || REG_NOTE_KIND (note) == REG_UNUSED)
13098	      && GET_CODE (XEXP (note, 0)) == REG)
13099	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
13100
13101	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
13102					       REG_NOTE_KIND (note),
13103					       XEXP (note, 0),
13104					       REG_NOTES (place2));
13105	}
13106    }
13107}
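
/* Placing a note is the cons-style push at the bottom of the loop
   above: the note's link field is pointed at the insn's current
   REG_NOTES chain and the chain head is replaced.  A standalone sketch
   over a toy singly linked list:  */
#if 0
struct toy_note
{
  struct toy_note *next;	/* XEXP (note, 1) analogue */
  int kind;
};

static void
push_note (struct toy_note **reg_notes, struct toy_note *note)
{
  note->next = *reg_notes;	/* XEXP (note, 1) = REG_NOTES (place) */
  *reg_notes = note;		/* REG_NOTES (place) = note */
}

int
main (void)
{
  struct toy_note *chain = 0;
  struct toy_note a = { 0, 1 }, b = { 0, 2 };

  push_note (&chain, &a);
  push_note (&chain, &b);
  return chain == &b && chain->next == &a ? 0 : 1;
}
#endif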
13108
13109/* Similarly to above, distribute the LOG_LINKS that used to be present on
13110   I3, I2, and I1 to new locations.  This is also called to add a link
13111   pointing at I3 when I3's destination is changed.  */
13112
13113static void
13114distribute_links (links)
13115     rtx links;
13116{
13117  rtx link, next_link;
13118
13119  for (link = links; link; link = next_link)
13120    {
13121      rtx place = 0;
13122      rtx insn;
13123      rtx set, reg;
13124
13125      next_link = XEXP (link, 1);
13126
13127      /* If the insn that this link points to is a NOTE or isn't a single
13128	 set, ignore it.  In the latter case, it isn't clear what we
13129	 can do other than ignore the link, since we can't tell which
13130	 register it was for.  Such links wouldn't be used by combine
13131	 anyway.
13132
13133	 It is not possible for the destination of the target of the link to
13134	 have been changed by combine.  The only potential of this is if we
13135	 have been changed by combine.  The only way this could happen is if we
13136	 destination of I2 also remains unchanged.  */
13137
13138      if (GET_CODE (XEXP (link, 0)) == NOTE
13139	  || (set = single_set (XEXP (link, 0))) == 0)
13140	continue;
13141
13142      reg = SET_DEST (set);
13143      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
13144	     || GET_CODE (reg) == SIGN_EXTRACT
13145	     || GET_CODE (reg) == STRICT_LOW_PART)
13146	reg = XEXP (reg, 0);
13147
13148      /* A LOG_LINK is defined as being placed on the first insn that uses
13149	 a register and points to the insn that sets the register.  Start
13150	 searching at the next insn after the target of the link and stop
13151	 when we reach a set of the register or the end of the basic block.
13152
13153	 Note that this correctly handles the link that used to point from
13154	 I3 to I2.  Also note that not much searching is typically done here
13155	 since most links don't point very far away.  */
13156
13157      for (insn = NEXT_INSN (XEXP (link, 0));
13158	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
13159		     || this_basic_block->next_bb->head != insn));
13160	   insn = NEXT_INSN (insn))
13161	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
13162	  {
13163	    if (reg_referenced_p (reg, PATTERN (insn)))
13164	      place = insn;
13165	    break;
13166	  }
13167	else if (GET_CODE (insn) == CALL_INSN
13168		 && find_reg_fusage (insn, USE, reg))
13169	  {
13170	    place = insn;
13171	    break;
13172	  }
13173
13174      /* If we found a place to put the link, place it there unless there
13175	 is already a link to the same insn as LINK at that point.  */
13176
13177      if (place)
13178	{
13179	  rtx link2;
13180
13181	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
13182	    if (XEXP (link2, 0) == XEXP (link, 0))
13183	      break;
13184
13185	  if (link2 == 0)
13186	    {
13187	      XEXP (link, 1) = LOG_LINKS (place);
13188	      LOG_LINKS (place) = link;
13189
13190	      /* Set added_links_insn to the earliest insn we added a
13191		 link to.  */
13192	      if (added_links_insn == 0
13193		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
13194		added_links_insn = place;
13195	    }
13196	}
13197    }
13198}
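
/* A standalone sketch of the search above: starting just after the
   setter, the first insn that mentions the register decides where the
   LOG_LINK goes; a use receives the link, while a new set kills it.
   The uses/sets arrays are illustrative stand-ins for scanning the
   insn chain.  */
#if 0
static int
find_link_place (int setter, int n_insns, const int *uses, const int *sets)
{
  int i;

  for (i = setter + 1; i < n_insns; i++)
    {
      if (uses[i])
	return i;		/* first use: place the link here */
      if (sets[i])
	return -1;		/* register set again first: no new home */
    }

  return -1;			/* fell off the basic block */
}

int
main (void)
{
  /* Insn 0 sets the reg; insn 2 uses it; insn 4 sets it again.  */
  int uses[5] = { 0, 0, 1, 0, 0 };
  int sets[5] = { 1, 0, 0, 0, 1 };

  return find_link_place (0, 5, uses, sets) == 2 ? 0 : 1;
}
#endif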
13199
13200/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
13201
13202static int
13203insn_cuid (insn)
13204     rtx insn;
13205{
13206  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
13207	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
13208    insn = NEXT_INSN (insn);
13209
13210  if (INSN_UID (insn) > max_uid_cuid)
13211    abort ();
13212
13213  return INSN_CUID (insn);
13214}
13215
13216void
13217dump_combine_stats (file)
13218     FILE *file;
13219{
13220  fnotice
13221    (file,
13222     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13223     combine_attempts, combine_merges, combine_extras, combine_successes);
13224}
13225
13226void
13227dump_combine_total_stats (file)
13228     FILE *file;
13229{
13230  fnotice
13231    (file,
13232     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13233     total_attempts, total_merges, total_extras, total_successes);
13234}
13235