combine.c revision 50397
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKS never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

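   As a hedged illustration (schematic RTL, not taken from any
   particular dump), a linked triple might look like:

	 A: (set (reg 60) (mem (reg 59)))
	 B: (set (reg 61) (plus (reg 60) (const_int 1)))
	 C: (set (mem (reg 59)) (reg 61))

   Here C's LOG_LINKS contains B and B's contains A, so we first try
   the pair (B, C) and then the triple (A, B, C).
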
   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insns' values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
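
/* A minimal sketch of the substitution described above, assuming a
   target whose machine description accepts reg+offset addresses (the
   insns are illustrative, not taken from any particular port):

	 (set (reg 100) (plus (reg 99) (const_int 4)))
	 (set (mem (reg 100)) (const_int 0))

   can be combined, by substituting the first SET's source into the
   second insn, into the single insn

	 (set (mem (plus (reg 99) (const_int 4))) (const_int 0))

   after which the first insn is deleted if (reg 100) is dead.  */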

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
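
/* For example, an insn created during combination can have a uid larger
   than max_uid_cuid, since uid_cuid is sized before any new insns are
   made; the macro then falls back on insn_cuid rather than indexing
   past the end of the table.  */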

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
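
/* A hedged reading of the rules above: if pseudo 100 is set exactly
   once (reg_n_sets[100] == 1), both its recorded value and its
   appearance inside other recorded values stay usable across labels;
   if pseudo 101 is set in more than one basic block, its entry is
   trusted only while reg_last_set_label[101] still equals label_tick,
   and an expression mentioning 101 from an earlier tick is replaced
   by (clobber (const_int 0)).  */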

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;
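
/* An illustrative entry: a register only ever assigned values of the
   form (and X (const_int 255)) would get the entry 255, meaning every
   bit above the low byte is known to be zero wherever the register is
   used.  */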

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, chained through the next
   fields of struct undo.  undobuf.undos points to the list of pending
   changes and undobuf.frees to records available for reuse.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This prevents gen_rtx_combine from re-using a piece
   of the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 0;						\
      _buf->where.r = &INTO;					\
      _buf->old_contents.r = INTO;				\
      INTO = _new;						\
      if (_buf->old_contents.r == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
    } while (0)
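
/* Typical use, as seen later in this file:

     SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));

   The old destination is saved on undobuf.undos so that undo_all can
   restore it if the combination is rejected.  */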

/* Similar to SUBST, but NEWVAL is an int expression.  Note that this
   macro stores through an `int *', so substituting for a HOST_WIDE_INT
   value (including the contents of a CONST_INT) is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 1;						\
      _buf->where.i = (int *) &INTO;				\
      _buf->old_contents.i = INTO;				\
      INTO = NEWVAL;						\
      if (_buf->old_contents.i == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
     } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO((void));
static void setup_incoming_promotions   PROTO((void));
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PROTO((rtx));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PROTO((rtx));
static rtx simplify_set		PROTO((rtx));
static rtx simplify_logical	PROTO((rtx, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx extract_left_shift	PROTO((rtx, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PROTO((rtx, rtx *, rtx *));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				  ...));
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, rtx, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
static int insn_cuid		PROTO((rtx));

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly reference CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we
	 can't say what its contents were.  */
      && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
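
/* A worked instance of the SHORT_IMMEDIATES_SIGN_EXTEND kludge above,
   with illustrative numbers (8-bit QImode, wider word): storing the
   constant 0x80 into a QImode pseudo gives INTVAL (src) == 128 > 0
   with the QImode sign bit set, so SRC is widened to ...ffffff80
   before its nonzero bits are recorded.  */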

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USEs of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  int regno = REGNO (XEXP (elt, 0));
		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);
		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in combinable_i3pat except that we don't
	 test if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	    && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

  /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
     was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		     && ! REG_USERVAR_P (inner_dest)
		     && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			     && i3 != 0
			     && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
1312
1313/* Try to combine the insns I1 and I2 into I3.
1314   Here I1 and I2 appear earlier than I3.
1315   I1 can be zero; then we combine just I2 into I3.
1316
1317   It we are combining three insns and the resulting insn is not recognized,
1318   try splitting it into two insns.  If that happens, I2 and I3 are retained
1319   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
1320   are pseudo-deleted.
1321
1322   Return 0 if the combination does not work.  Then nothing is changed.
1323   If we did the combination, return the insn at which combine should
1324   resume scanning.  */
1325
1326static rtx
1327try_combine (i3, i2, i1)
1328     register rtx i3, i2, i1;
1329{
1330  /* New patterns for I3 and I3, respectively.  */
1331  rtx newpat, newi2pat = 0;
1332  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
1333  int added_sets_1, added_sets_2;
1334  /* Total number of SETs to put into I3.  */
1335  int total_sets;
1336  /* Nonzero is I2's body now appears in I3.  */
1337  int i2_is_used;
1338  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
1339  int insn_code_number, i2_code_number, other_code_number;
1340  /* Contains I3 if the destination of I3 is used in its source, which means
1341     that the old life of I3 is being killed.  If that usage is placed into
1342     I2 and not in I3, a REG_DEAD note must be made.  */
1343  rtx i3dest_killed = 0;
1344  /* SET_DEST and SET_SRC of I2 and I1.  */
1345  rtx i2dest, i2src, i1dest = 0, i1src = 0;
1346  /* PATTERN (I2), or a copy of it in certain cases.  */
1347  rtx i2pat;
1348  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
1349  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1350  int i1_feeds_i3 = 0;
1351  /* Notes that must be added to REG_NOTES in I3 and I2.  */
1352  rtx new_i3_notes, new_i2_notes;
1353  /* Notes that we substituted I3 into I2 instead of the normal case.  */
1354  int i3_subst_into_i2 = 0;
1355  /* Notes that I1, I2 or I3 is a MULT operation.  */
1356  int have_mult = 0;
1357  /* Number of clobbers of SCRATCH we had to add.  */
1358  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
1359
1360  int maxreg;
1361  rtx temp;
1362  register rtx link;
1363  int i;
1364
1365  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1366     This can occur when flow deletes an insn that it has merged into an
1367     auto-increment address.  We also can't do anything if I3 has a
1368     REG_LIBCALL note since we don't want to disrupt the contiguity of a
1369     libcall.  */
1370
1371  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1372      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1373      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1374      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1375    return 0;
1376
1377  combine_attempts++;
1378
1379  undobuf.undos = undobuf.previous_undos = 0;
1380  undobuf.other_insn = 0;
1381
1382  /* Save the current high-water-mark so we can free storage if we didn't
1383     accept this combination.  */
1384  undobuf.storage = (char *) oballoc (0);
1385
1386  /* Reset the hard register usage information.  */
1387  CLEAR_HARD_REG_SET (newpat_used_regs);
1388
1389  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
1390     code below, set I1 to be the earlier of the two insns.  */
1391  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1392    temp = i1, i1 = i2, i2 = temp;
1393
1394  added_links_insn = 0;
1395
1396  /* First check for one important special-case that the code below will
1397     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
1398     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
1399     we may be able to replace that destination with the destination of I3.
1400     This occurs in the common code where we compute both a quotient and
1401     remainder into a structure, in which case we want to do the computation
1402     directly into the structure to avoid register-register copies.
1403
1404     We make very conservative checks below and only try to handle the
1405     most common cases of this.  For example, we only handle the case
1406     where I2 and I3 are adjacent to avoid making difficult register
1407     usage tests.  */
1408
1409  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1410      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1411      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1412      && (! SMALL_REGISTER_CLASSES
1413	  || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1414	      || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1415	      || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
1416      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1417      && GET_CODE (PATTERN (i2)) == PARALLEL
1418      && ! side_effects_p (SET_DEST (PATTERN (i3)))
1419      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1420	 below would need to check what is inside (and reg_overlap_mentioned_p
1421	 doesn't support those codes anyway).  Don't allow those destinations;
1422	 the resulting insn isn't likely to be recognized anyway.  */
1423      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1424      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1425      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1426				    SET_DEST (PATTERN (i3)))
1427      && next_real_insn (i2) == i3)
1428    {
1429      rtx p2 = PATTERN (i2);
1430
1431      /* Make sure that the destination of I3,
1432	 which we are going to substitute into one output of I2,
1433	 is not used within another output of I2.  We must avoid making this:
1434	 (parallel [(set (mem (reg 69)) ...)
1435		    (set (reg 69) ...)])
1436	 which is not well-defined as to order of actions.
1437	 (Besides, reload can't handle output reloads for this.)
1438
1439	 The problem can also happen if the dest of I3 is a memory ref,
1440	 if another dest in I2 is an indirect memory ref.  */
1441      for (i = 0; i < XVECLEN (p2, 0); i++)
1442	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1443	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1444	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1445					SET_DEST (XVECEXP (p2, 0, i))))
1446	  break;
1447
1448      if (i == XVECLEN (p2, 0))
1449	for (i = 0; i < XVECLEN (p2, 0); i++)
1450	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1451	    {
1452	      combine_merges++;
1453
1454	      subst_insn = i3;
1455	      subst_low_cuid = INSN_CUID (i2);
1456
1457	      added_sets_2 = added_sets_1 = 0;
1458	      i2dest = SET_SRC (PATTERN (i3));
1459
1460	      /* Replace the dest in I2 with our dest and make the resulting
1461		 insn the new pattern for I3.  Then skip to where we
1462		 validate the pattern.  Everything was set up above.  */
1463	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1464		     SET_DEST (PATTERN (i3)));
1465
1466	      newpat = p2;
1467	      i3_subst_into_i2 = 1;
1468	      goto validate_replacement;
1469	    }
1470    }
1471
1472#ifndef HAVE_cc0
1473  /* If we have no I1 and I2 looks like:
1474	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1475		   (set Y OP)])
1476     make up a dummy I1 that is
1477	(set Y OP)
1478     and change I2 to be
1479        (set (reg:CC X) (compare:CC Y (const_int 0)))
1480
1481     (We can ignore any trailing CLOBBERs.)
1482
1483     This undoes a previous combination and allows us to match a branch-and-
1484     decrement insn.  */
1485
1486  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1487      && XVECLEN (PATTERN (i2), 0) >= 2
1488      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1489      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1490	  == MODE_CC)
1491      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1492      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1493      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1494      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1495      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1496		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1497    {
1498      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1499	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1500	  break;
1501
1502      if (i == 1)
1503	{
1504	  /* We make I1 with the same INSN_UID as I2.  This gives it
1505	     the same INSN_CUID for value tracking.  Our fake I1 will
1506	     never appear in the insn stream so giving it the same INSN_UID
1507	     as I2 will not cause a problem.  */
1508
1509	  subst_prev_insn = i1
1510	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1511			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1512			    NULL_RTX);
1513
1514	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1515	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1516		 SET_DEST (PATTERN (i1)));
1517	}
1518    }
1519#endif
1520
1521  /* Verify that I2 and I1 are valid for combining.  */
1522  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1523      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1524    {
1525      undo_all ();
1526      return 0;
1527    }
1528
1529  /* Record whether I2DEST is used in I2SRC and similarly for the other
1530     cases.  Knowing this will help in register status updating below.  */
1531  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1532  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1533  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1534
1535  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
1536     in I2SRC.  */
1537  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1538
1539  /* Ensure that I3's pattern can be the destination of combines.  */
1540  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1541			  i1 && i2dest_in_i1src && i1_feeds_i3,
1542			  &i3dest_killed))
1543    {
1544      undo_all ();
1545      return 0;
1546    }
1547
1548  /* See if any of the insns is a MULT operation.  Unless one is, we will
1549     reject a combination that is, since it must be slower.  Be conservative
1550     here.  */
1551  if (GET_CODE (i2src) == MULT
1552      || (i1 != 0 && GET_CODE (i1src) == MULT)
1553      || (GET_CODE (PATTERN (i3)) == SET
1554	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1555    have_mult = 1;
1556
1557  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1558     We used to do this EXCEPT in one case: I3 has a post-inc in an
1559     output operand.  However, that exception can give rise to insns like
1560     	mov r3,(r3)+
1561     which is a famous insn on the PDP-11 where the value of r3 used as the
1562     source was model-dependent.  Avoid this sort of thing.  */
1563
1564#if 0
1565  if (!(GET_CODE (PATTERN (i3)) == SET
1566	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
1567	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1568	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1569	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1570    /* It's not the exception.  */
1571#endif
1572#ifdef AUTO_INC_DEC
1573    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1574      if (REG_NOTE_KIND (link) == REG_INC
1575	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1576	      || (i1 != 0
1577		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1578	{
1579	  undo_all ();
1580	  return 0;
1581	}
1582#endif
1583
1584  /* See if the SETs in I1 or I2 need to be kept around in the merged
1585     instruction: whenever the value set there is still needed past I3.
1586     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1587
1588     For the SET in I1, we have two cases:  If I1 and I2 independently
1589     feed into I3, the set in I1 needs to be kept around if I1DEST dies
1590     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
1591     in I1 needs to be kept around unless I1DEST dies or is set in either
1592     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
1593     I1DEST.  If so, we know I1 feeds into I2.  */
1594
1595  added_sets_2 = ! dead_or_set_p (i3, i2dest);
1596
1597  added_sets_1
1598    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1599	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1600
1601  /* If the set in I2 needs to be kept around, we must make a copy of
1602     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1603     PATTERN (I2), we are only substituting for the original I1DEST, not into
1604     an already-substituted copy.  This also prevents making self-referential
1605     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1606     I2DEST.  */
1607
1608  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1609	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1610	   : PATTERN (i2));
1611
1612  if (added_sets_2)
1613    i2pat = copy_rtx (i2pat);
1614
1615  combine_merges++;
1616
1617  /* Substitute in the latest insn for the regs set by the earlier ones.  */
1618
1619  maxreg = max_reg_num ();
1620
1621  subst_insn = i3;
1622
1623  /* It is possible that the source of I2 or I1 may be performing an
1624     unneeded operation, such as a ZERO_EXTEND of something that is known
1625     to have the high part zero.  Handle that case by letting subst look at
1626     the innermost one of them.
1627
1628     Another way to do this would be to have a function that tries to
1629     simplify a single insn instead of merging two or more insns.  We don't
1630     do this because of the potential of infinite loops and because
1631     of the potential extra memory required.  However, doing it the way
1632     we are is a bit of a kludge and doesn't catch all cases.
1633
1634     But only do this if -fexpensive-optimizations since it slows things down
1635     and doesn't usually win.  */
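  /* A hypothetical example: if I1SRC is (zero_extend:SI (reg:HI 65)) and
     the upper half of (reg 65) is already known to be zero, the subst
     call below can strip the extension before we merge, where merging
     the unsimplified form might fail to match any insn.  */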
1636
1637  if (flag_expensive_optimizations)
1638    {
1639      /* Pass pc_rtx so no substitutions are done, just simplifications.
1640	 The cases that we are interested in here do not involve the few
1641	 cases where is_replaced is checked.  */
1642      if (i1)
1643	{
1644	  subst_low_cuid = INSN_CUID (i1);
1645	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1646	}
1647      else
1648	{
1649	  subst_low_cuid = INSN_CUID (i2);
1650	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1651	}
1652
1653      undobuf.previous_undos = undobuf.undos;
1654    }
1655
1656#ifndef HAVE_cc0
1657  /* Many machines that don't use CC0 have insns that can both perform an
1658     arithmetic operation and set the condition code.  These operations will
1659     be represented as a PARALLEL with the first element of the vector
1660     being a COMPARE of an arithmetic operation with the constant zero.
1661     The second element of the vector will set some pseudo to the result
1662     of the same arithmetic operation.  If we simplify the COMPARE, we won't
1663     match such a pattern and so will generate an extra insn.  Here we test
1664     for this case, where both the comparison and the operation result are
1665     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1666     I2SRC.  Later we will make the PARALLEL that contains I2.  */
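  /* For instance (an invented pattern), such a PARALLEL might look like

	(parallel [(set (reg:CC 24) (compare:CC (plus:SI (reg:SI 65)
							 (reg:SI 66))
						(const_int 0)))
		   (set (reg:SI 67) (plus:SI (reg:SI 65) (reg:SI 66)))])

     where both the flags result and the sum are needed.  */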
1667
1668  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1669      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1670      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1671      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1672    {
1673#ifdef EXTRA_CC_MODES
1674      rtx *cc_use;
1675      enum machine_mode compare_mode;
1676#endif
1677
1678      newpat = PATTERN (i3);
1679      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1680
1681      i2_is_used = 1;
1682
1683#ifdef EXTRA_CC_MODES
1684      /* See if a COMPARE with the operand we substituted in should be done
1685	 with the mode that is currently being used.  If not, do the same
1686	 processing we do in `subst' for a SET; namely, if the destination
1687	 is used only once, try to replace it with a register of the proper
1688	 mode and also replace the COMPARE.  */
1689      if (undobuf.other_insn == 0
1690	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
1691					&undobuf.other_insn))
1692	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1693					      i2src, const0_rtx))
1694	      != GET_MODE (SET_DEST (newpat))))
1695	{
1696	  int regno = REGNO (SET_DEST (newpat));
1697	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
1698
1699	  if (regno < FIRST_PSEUDO_REGISTER
1700	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
1701		  && ! REG_USERVAR_P (SET_DEST (newpat))))
1702	    {
1703	      if (regno >= FIRST_PSEUDO_REGISTER)
1704		SUBST (regno_reg_rtx[regno], new_dest);
1705
1706	      SUBST (SET_DEST (newpat), new_dest);
1707	      SUBST (XEXP (*cc_use, 0), new_dest);
1708	      SUBST (SET_SRC (newpat),
1709		     gen_rtx_combine (COMPARE, compare_mode,
1710				      i2src, const0_rtx));
1711	    }
1712	  else
1713	    undobuf.other_insn = 0;
1714	}
1715#endif
1716    }
1717  else
1718#endif
1719    {
1720      n_occurrences = 0;		/* `subst' counts here */
1721
1722      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1723	 need to make a unique copy of I2SRC each time we substitute it
1724	 to avoid self-referential rtl.  */
1725
1726      subst_low_cuid = INSN_CUID (i2);
1727      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1728		      ! i1_feeds_i3 && i1dest_in_i1src);
1729      undobuf.previous_undos = undobuf.undos;
1730
1731      /* Record whether i2's body now appears within i3's body.  */
1732      i2_is_used = n_occurrences;
1733    }
1734
1735  /* If we already got a failure, don't try to do more.  Otherwise,
1736     try to substitute in I1 if we have it.  */
1737
1738  if (i1 && GET_CODE (newpat) != CLOBBER)
1739    {
1740      /* Before we can do this substitution, we must redo the test done
1741	 above (see detailed comments there) that ensures that I1DEST
1742	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */
1743
1744      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1745			      0, NULL_PTR))
1746	{
1747	  undo_all ();
1748	  return 0;
1749	}
1750
1751      n_occurrences = 0;
1752      subst_low_cuid = INSN_CUID (i1);
1753      newpat = subst (newpat, i1dest, i1src, 0, 0);
1754      undobuf.previous_undos = undobuf.undos;
1755    }
1756
1757  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
1758     to count all the ways that I2SRC and I1SRC can be used.  */
1759  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1760       && i2_is_used + added_sets_2 > 1)
1761      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1762	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1763	      > 1))
1764      /* Fail if we tried to make a new register (we used to abort, but there's
1765	 really no reason to).  */
1766      || max_reg_num () != maxreg
1767      /* Fail if we couldn't do something and have a CLOBBER.  */
1768      || GET_CODE (newpat) == CLOBBER
1769      /* Fail if this new pattern is a MULT and we didn't have one before
1770	 at the outer level.  */
1771      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1772	  && ! have_mult))
1773    {
1774      undo_all ();
1775      return 0;
1776    }
1777
1778  /* If the actions of the earlier insns must be kept
1779     in addition to substituting them into the latest one,
1780     we must make a new PARALLEL for the latest insn
1781     to hold the additional SETs.  */
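  /* For example, if NEWPAT is (set D3 S3) and the SET from I2 must be
     kept, we build

	(parallel [(set D3 S3)
		   (set D2 S2)])

     where (set D2 S2) is I2PAT, possibly with I1SRC substituted for
     I1DEST below.  */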
1782
1783  if (added_sets_1 || added_sets_2)
1784    {
1785      combine_extras++;
1786
1787      if (GET_CODE (newpat) == PARALLEL)
1788	{
1789	  rtvec old = XVEC (newpat, 0);
1790	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1791	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1792	  bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
1793		 sizeof (old->elem[0]) * old->num_elem);
1794	}
1795      else
1796	{
1797	  rtx old = newpat;
1798	  total_sets = 1 + added_sets_1 + added_sets_2;
1799	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1800	  XVECEXP (newpat, 0, 0) = old;
1801	}
1802
1803      if (added_sets_1)
1804	XVECEXP (newpat, 0, --total_sets)
1805	  = (GET_CODE (PATTERN (i1)) == PARALLEL
1806	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1807
1808      if (added_sets_2)
1809	{
1810	  /* If there is no I1, use I2's body as is.  We used to also not do
1811	     the subst call below if I2 was substituted into I3,
1812	     but that could lose a simplification.  */
1813	  if (i1 == 0)
1814	    XVECEXP (newpat, 0, --total_sets) = i2pat;
1815	  else
1816	    /* See comment where i2pat is assigned.  */
1817	    XVECEXP (newpat, 0, --total_sets)
1818	      = subst (i2pat, i1dest, i1src, 0, 0);
1819	}
1820    }
1821
1822  /* We come here when we are replacing a destination in I2 with the
1823     destination of I3.  */
1824 validate_replacement:
1825
1826  /* Note which hard regs this insn has as inputs.  */
1827  mark_used_regs_combine (newpat);
1828
1829  /* Is the result of combination a valid instruction?  */
1830  insn_code_number
1831    = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1832
1833  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1834     the second SET's destination is a register that is unused.  In that case,
1835     we just need the first SET.   This can occur when simplifying a divmod
1836     insn.  We *must* test for this case here because the code below that
1837     splits two independent SETs doesn't handle this case correctly when it
1838     updates the register status.  Also check the case where the first
1839     SET's destination is unused.  That would not cause incorrect code, but
1840     does cause an unneeded insn to remain.  */
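  /* For example (invented registers), if NEWPAT is

	(parallel [(set (reg:SI 65) (div:SI (reg:SI 60) (reg:SI 61)))
		   (set (reg:SI 66) (mod:SI (reg:SI 60) (reg:SI 61)))])

     and I3 has a REG_UNUSED note for (reg 66), only the first SET is
     needed and we retry recognition with it alone.  */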
1841
1842  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1843      && XVECLEN (newpat, 0) == 2
1844      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1845      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1846      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1847      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1848      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1849      && asm_noperands (newpat) < 0)
1850    {
1851      newpat = XVECEXP (newpat, 0, 0);
1852      insn_code_number
1853	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1854    }
1855
1856  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1857	   && XVECLEN (newpat, 0) == 2
1858	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1859	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1860	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1861	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1862	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1863	   && asm_noperands (newpat) < 0)
1864    {
1865      newpat = XVECEXP (newpat, 0, 1);
1866      insn_code_number
1867	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1868    }
1869
1870  /* If we were combining three insns and the result is a simple SET
1871     with no ASM_OPERANDS that wasn't recognized, try to split it into two
1872     insns.  There are two ways to do this.  It can be split using a
1873     machine-specific method (such as an addition of a large
1874     constant) or by combine in the function find_split_point.  */
1875
1876  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1877      && asm_noperands (newpat) < 0)
1878    {
1879      rtx m_split, *split;
1880      rtx ni2dest = i2dest;
1881
1882      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
1883	 use I2DEST as a scratch register will help.  In the latter case,
1884	 convert I2DEST to the mode of the source of NEWPAT if we can.  */
1885
1886      m_split = split_insns (newpat, i3);
1887
1888      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1889	 inputs of NEWPAT.  */
1890
1891      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1892	 possible to try that as a scratch reg.  This would require adding
1893	 more code to make it work though.  */
1894
1895      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1896	{
1897	  /* If I2DEST is a hard register or the only use of a pseudo,
1898	     we can change its mode.  */
1899	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1900	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
1901	      && GET_CODE (i2dest) == REG
1902	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1903		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1904		      && ! REG_USERVAR_P (i2dest))))
1905	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
1906			       REGNO (i2dest));
1907
1908	  m_split = split_insns
1909	    (gen_rtx_PARALLEL (VOIDmode,
1910			       gen_rtvec (2, newpat,
1911					  gen_rtx_CLOBBER (VOIDmode,
1912							   ni2dest))),
1913	     i3);
1914	}
1915
1916      if (m_split && GET_CODE (m_split) == SEQUENCE
1917	  && XVECLEN (m_split, 0) == 2
1918	  && (next_real_insn (i2) == i3
1919	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1920				      INSN_CUID (i2))))
1921	{
1922	  rtx i2set, i3set;
1923	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1924	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1925
1926	  i3set = single_set (XVECEXP (m_split, 0, 1));
1927	  i2set = single_set (XVECEXP (m_split, 0, 0));
1928
1929	  /* In case we changed the mode of I2DEST, replace it in the
1930	     pseudo-register table here.  We can't do it above in case this
1931	     code doesn't get executed and we do a split the other way.  */
1932
1933	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1934	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1935
1936	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1937					      &i2_scratches);
1938
1939	  /* If I2 or I3 has multiple SETs, we won't know how to track
1940	     register status, so don't use these insns.  If I2's destination
1941	     is used between I2 and I3, we also can't use these insns.  */
1942
1943	  if (i2_code_number >= 0 && i2set && i3set
1944	      && (next_real_insn (i2) == i3
1945		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
1946	    insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1947						  &i3_scratches);
1948	  if (insn_code_number >= 0)
1949	    newpat = newi3pat;
1950
1951	  /* It is possible that both insns now set the destination of I3.
1952	     If so, we must show an extra use of it.  */
1953
1954	  if (insn_code_number >= 0)
1955	    {
1956	      rtx new_i3_dest = SET_DEST (i3set);
1957	      rtx new_i2_dest = SET_DEST (i2set);
1958
1959	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1960		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1961		     || GET_CODE (new_i3_dest) == SUBREG)
1962		new_i3_dest = XEXP (new_i3_dest, 0);
1963
1964	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1965		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1966		     || GET_CODE (new_i2_dest) == SUBREG)
1967		new_i2_dest = XEXP (new_i2_dest, 0);
1968
1969	      if (GET_CODE (new_i3_dest) == REG
1970		  && GET_CODE (new_i2_dest) == REG
1971		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
1972		REG_N_SETS (REGNO (new_i2_dest))++;
1973	    }
1974	}
1975
1976      /* If we can split it and use I2DEST, go ahead and see if that
1977	 helps things be recognized.  Verify that none of the registers
1978	 are set between I2 and I3.  */
1979      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1980#ifdef HAVE_cc0
1981	  && GET_CODE (i2dest) == REG
1982#endif
1983	  /* We need I2DEST in the proper mode.  If it is a hard register
1984	     or the only use of a pseudo, we can change its mode.  */
1985	  && (GET_MODE (*split) == GET_MODE (i2dest)
1986	      || GET_MODE (*split) == VOIDmode
1987	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1988	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1989		  && ! REG_USERVAR_P (i2dest)))
1990	  && (next_real_insn (i2) == i3
1991	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1992	  /* We can't overwrite I2DEST if its value is still used by
1993	     NEWPAT.  */
1994	  && ! reg_referenced_p (i2dest, newpat))
1995	{
1996	  rtx newdest = i2dest;
1997	  enum rtx_code split_code = GET_CODE (*split);
1998	  enum machine_mode split_mode = GET_MODE (*split);
1999
2000	  /* Get NEWDEST as a register in the proper mode.  We have already
2001	     validated that we can do this.  */
2002	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2003	    {
2004	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
2005
2006	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2007		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2008	    }
2009
2010	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2011	     an ASHIFT.  This can occur if it was inside a PLUS and hence
2012	     appeared to be a memory address.  This is a kludge.  */
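	  /* E.g. (mult:SI (reg:SI 65) (const_int 8)) is rewritten as
	     (ashift:SI (reg:SI 65) (const_int 3)), since
	     exact_log2 (8) == 3.  */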
2013	  if (split_code == MULT
2014	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
2015	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2016	    {
2017	      SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2018					      XEXP (*split, 0), GEN_INT (i)));
2019	      /* Update split_code because we may not have a multiply
2020		 anymore.  */
2021	      split_code = GET_CODE (*split);
2022	    }
2023
2024#ifdef INSN_SCHEDULING
2025	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2026	     be written as a ZERO_EXTEND.  */
2027	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2028	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
2029					    XEXP (*split, 0)));
2030#endif
2031
2032	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2033	  SUBST (*split, newdest);
2034	  i2_code_number
2035	    = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2036
2037	  /* If the split point was a MULT and we didn't have one before,
2038	     don't use one now.  */
2039	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2040	    insn_code_number
2041	      = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2042	}
2043    }
2044
2045  /* Check for a case where we loaded from memory in a narrow mode and
2046     then sign extended it, but we need both registers.  In that case,
2047     we have a PARALLEL with both loads from the same memory location.
2048     We can split this into a load from memory followed by a register-register
2049     copy.  This saves at least one insn, more if register allocation can
2050     eliminate the copy.
2051
2052     We cannot do this if the destination of the second assignment is
2053     a register that we have already assumed is zero-extended.  Similarly
2054     for a SUBREG of such a register.  */
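  /* For example (invented registers), NEWPAT might be

	(parallel [(set (reg:SI 66) (sign_extend:SI (mem:HI (reg:SI 65))))
		   (set (reg:HI 67) (mem:HI (reg:SI 65)))])

     which we split into NEWI2PAT, the extending load, followed by a new
     I3 that copies the low part of (reg 66) into (reg 67).  */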
2055
2056  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2057	   && GET_CODE (newpat) == PARALLEL
2058	   && XVECLEN (newpat, 0) == 2
2059	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2060	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2061	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2062	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2063			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2064	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2065				   INSN_CUID (i2))
2066	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2067	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2068	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2069		 (GET_CODE (temp) == REG
2070		  && reg_nonzero_bits[REGNO (temp)] != 0
2071		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2072		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2073		  && (reg_nonzero_bits[REGNO (temp)]
2074		      != GET_MODE_MASK (word_mode))))
2075	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2076		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2077		     (GET_CODE (temp) == REG
2078		      && reg_nonzero_bits[REGNO (temp)] != 0
2079		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2080		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2081		      && (reg_nonzero_bits[REGNO (temp)]
2082			  != GET_MODE_MASK (word_mode)))))
2083	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2084					 SET_SRC (XVECEXP (newpat, 0, 1)))
2085	   && ! find_reg_note (i3, REG_UNUSED,
2086			       SET_DEST (XVECEXP (newpat, 0, 0))))
2087    {
2088      rtx ni2dest;
2089
2090      newi2pat = XVECEXP (newpat, 0, 0);
2091      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2092      newpat = XVECEXP (newpat, 0, 1);
2093      SUBST (SET_SRC (newpat),
2094	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2095      i2_code_number
2096	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2097
2098      if (i2_code_number >= 0)
2099	insn_code_number
2100	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2101
2102      if (insn_code_number >= 0)
2103	{
2104	  rtx insn;
2105	  rtx link;
2106
2107	  /* If we will be able to accept this, we have made a change to the
2108	     destination of I3.  This can invalidate a LOG_LINKS pointing
2109	     to I3.  No other part of combine.c makes such a transformation.
2110
2111	     The new I3 will have a destination that was previously the
2112	     destination of I1 or I2 and which was used in I2 or I3.  Call
2113	     distribute_links to make a LOG_LINK from the next use of
2114	     that destination.  */
2115
2116	  PATTERN (i3) = newpat;
2117	  distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2118
2119	  /* I3 now uses what used to be its destination and which is
2120	     now I2's destination.  That means we need a LOG_LINK from
2121	     I3 to I2.  But we used to have one, so we still will.
2122
2123	     However, some later insn might be using I2's dest and have
2124	     a LOG_LINK pointing at I3.  We must remove this link.
2125	     The simplest way to remove the link is to point it at I1,
2126	     which we know will be a NOTE.  */
2127
2128	  for (insn = NEXT_INSN (i3);
2129	       insn && (this_basic_block == n_basic_blocks - 1
2130			|| insn != basic_block_head[this_basic_block + 1]);
2131	       insn = NEXT_INSN (insn))
2132	    {
2133	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2134		  && reg_referenced_p (ni2dest, PATTERN (insn)))
2135		{
2136		  for (link = LOG_LINKS (insn); link;
2137		       link = XEXP (link, 1))
2138		    if (XEXP (link, 0) == i3)
2139		      XEXP (link, 0) = i1;
2140
2141		  break;
2142		}
2143	    }
2144	}
2145    }
2146
2147  /* Similarly, check for a case where we have a PARALLEL of two independent
2148     SETs but we started with three insns.  In this case, we can do the sets
2149     as two separate insns.  This case occurs when some SET allows two
2150     other insns to combine, but the destination of that SET is still live.  */
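  /* For example (an invented pattern), NEWPAT might be

	(parallel [(set (reg:SI 65) (plus:SI (reg:SI 60) (const_int 4)))
		   (set (reg:SI 66) (mem:SI (reg:SI 61)))])

     where neither SET references the other's destination; one SET can
     replace I2 and the other becomes the new I3.  */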
2151
2152  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2153	   && GET_CODE (newpat) == PARALLEL
2154	   && XVECLEN (newpat, 0) == 2
2155	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2156	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2157	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2158	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2159	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2160	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2161	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2162				   INSN_CUID (i2))
2163	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2164	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2165	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2166	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2167				  XVECEXP (newpat, 0, 0))
2168	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2169				  XVECEXP (newpat, 0, 1)))
2170    {
2171      /* Normally, it doesn't matter which of the two is done first,
2172	 but it does if one references cc0.  In that case, it has to
2173	 be first.  */
2174#ifdef HAVE_cc0
2175      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2176	{
2177	  newi2pat = XVECEXP (newpat, 0, 0);
2178	  newpat = XVECEXP (newpat, 0, 1);
2179	}
2180      else
2181#endif
2182	{
2183	  newi2pat = XVECEXP (newpat, 0, 1);
2184	  newpat = XVECEXP (newpat, 0, 0);
2185	}
2186
2187      i2_code_number
2188	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2189
2190      if (i2_code_number >= 0)
2191	insn_code_number
2192	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2193    }
2194
2195  /* If it still isn't recognized, fail and change things back the way they
2196     were.  */
2197  if ((insn_code_number < 0
2198       /* Is the result a reasonable ASM_OPERANDS?  */
2199       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2200    {
2201      undo_all ();
2202      return 0;
2203    }
2204
2205  /* If we had to change another insn, make sure it is valid also.  */
2206  if (undobuf.other_insn)
2207    {
2208      rtx other_pat = PATTERN (undobuf.other_insn);
2209      rtx new_other_notes;
2210      rtx note, next;
2211
2212      CLEAR_HARD_REG_SET (newpat_used_regs);
2213
2214      other_code_number
2215	= recog_for_combine (&other_pat, undobuf.other_insn,
2216			     &new_other_notes, &other_scratches);
2217
2218      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2219	{
2220	  undo_all ();
2221	  return 0;
2222	}
2223
2224      PATTERN (undobuf.other_insn) = other_pat;
2225
2226      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2227	 are still valid.  Then add any non-duplicate notes added by
2228	 recog_for_combine.  */
2229      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2230	{
2231	  next = XEXP (note, 1);
2232
2233	  if (REG_NOTE_KIND (note) == REG_UNUSED
2234	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2235	    {
2236	      if (GET_CODE (XEXP (note, 0)) == REG)
2237		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2238
2239	      remove_note (undobuf.other_insn, note);
2240	    }
2241	}
2242
2243      for (note = new_other_notes; note; note = XEXP (note, 1))
2244	if (GET_CODE (XEXP (note, 0)) == REG)
2245	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2246
2247      distribute_notes (new_other_notes, undobuf.other_insn,
2248			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2249    }
2250
2251  /* We now know that we can do this combination.  Merge the insns and
2252     update the status of registers and LOG_LINKS.  */
2253
2254  {
2255    rtx i3notes, i2notes, i1notes = 0;
2256    rtx i3links, i2links, i1links = 0;
2257    rtx midnotes = 0;
2258    register int regno;
2259    /* Compute which registers we expect to eliminate.  newi2pat may be setting
2260       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
2261       same as i3dest, in which case newi2pat may be setting i1dest.  */
2262    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2263		   || i2dest_in_i2src || i2dest_in_i1src
2264		   ? 0 : i2dest);
2265    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2266		   || (newi2pat && reg_set_p (i1dest, newi2pat))
2267		   ? 0 : i1dest);
2268
2269    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2270       clear them.  */
2271    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2272    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2273    if (i1)
2274      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2275
2276    /* Ensure that we do not have something that should not be shared but
2277       occurs multiple times in the new insns.  Check this by first
2278       resetting all the `used' flags and then copying anything that is shared.  */
2279
2280    reset_used_flags (i3notes);
2281    reset_used_flags (i2notes);
2282    reset_used_flags (i1notes);
2283    reset_used_flags (newpat);
2284    reset_used_flags (newi2pat);
2285    if (undobuf.other_insn)
2286      reset_used_flags (PATTERN (undobuf.other_insn));
2287
2288    i3notes = copy_rtx_if_shared (i3notes);
2289    i2notes = copy_rtx_if_shared (i2notes);
2290    i1notes = copy_rtx_if_shared (i1notes);
2291    newpat = copy_rtx_if_shared (newpat);
2292    newi2pat = copy_rtx_if_shared (newi2pat);
2293    if (undobuf.other_insn)
2294      reset_used_flags (PATTERN (undobuf.other_insn));
2295
2296    INSN_CODE (i3) = insn_code_number;
2297    PATTERN (i3) = newpat;
2298    if (undobuf.other_insn)
2299      INSN_CODE (undobuf.other_insn) = other_code_number;
2300
2301    /* We had one special case above where I2 had more than one set and
2302       we replaced a destination of one of those sets with the destination
2303       of I3.  In that case, we have to update LOG_LINKS of insns later
2304       in this basic block.  Note that this (expensive) case is rare.
2305
2306       Also, in this case, we must pretend that all REG_NOTEs for I2
2307       actually came from I3, so that REG_UNUSED notes from I2 will be
2308       properly handled.  */
2309
2310    if (i3_subst_into_i2)
2311      {
2312	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2313	  if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2314	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2315	      && ! find_reg_note (i2, REG_UNUSED,
2316				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2317	    for (temp = NEXT_INSN (i2);
2318		 temp && (this_basic_block == n_basic_blocks - 1
2319			  || basic_block_head[this_basic_block + 1] != temp);
2320		 temp = NEXT_INSN (temp))
2321	      if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2322		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2323		  if (XEXP (link, 0) == i2)
2324		    XEXP (link, 0) = i3;
2325
2326	if (i3notes)
2327	  {
2328	    rtx link = i3notes;
2329	    while (XEXP (link, 1))
2330	      link = XEXP (link, 1);
2331	    XEXP (link, 1) = i2notes;
2332	  }
2333	else
2334	  i3notes = i2notes;
2335	i2notes = 0;
2336      }
2337
2338    LOG_LINKS (i3) = 0;
2339    REG_NOTES (i3) = 0;
2340    LOG_LINKS (i2) = 0;
2341    REG_NOTES (i2) = 0;
2342
2343    if (newi2pat)
2344      {
2345	INSN_CODE (i2) = i2_code_number;
2346	PATTERN (i2) = newi2pat;
2347      }
2348    else
2349      {
2350	PUT_CODE (i2, NOTE);
2351	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2352	NOTE_SOURCE_FILE (i2) = 0;
2353      }
2354
2355    if (i1)
2356      {
2357	LOG_LINKS (i1) = 0;
2358	REG_NOTES (i1) = 0;
2359	PUT_CODE (i1, NOTE);
2360	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2361	NOTE_SOURCE_FILE (i1) = 0;
2362      }
2363
2364    /* Get death notes for everything that is now used in either I3 or
2365       I2 and used to die in a previous insn.  If we built two new
2366       patterns, move from I1 to I2 then I2 to I3 so that we get the
2367       proper movement on registers that I2 modifies.  */
2368
2369    if (newi2pat)
2370      {
2371	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2372	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2373      }
2374    else
2375      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2376		   i3, &midnotes);
2377
2378    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2379    if (i3notes)
2380      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2381			elim_i2, elim_i1);
2382    if (i2notes)
2383      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2384			elim_i2, elim_i1);
2385    if (i1notes)
2386      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2387			elim_i2, elim_i1);
2388    if (midnotes)
2389      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2390			elim_i2, elim_i1);
2391
2392    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2393       know these are REG_UNUSED and want them to go to the desired insn,
2394       so we always pass it as i3.  We have not counted the notes in
2395       reg_n_deaths yet, so we need to do so now.  */
2396
2397    if (newi2pat && new_i2_notes)
2398      {
2399	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2400	  if (GET_CODE (XEXP (temp, 0)) == REG)
2401	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2402
2403	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2404      }
2405
2406    if (new_i3_notes)
2407      {
2408	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2409	  if (GET_CODE (XEXP (temp, 0)) == REG)
2410	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2411
2412	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2413      }
2414
2415    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2416       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2417       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2418       in that case, it might delete I2.  Similarly for I2 and I1.
2419       Show an additional death due to the REG_DEAD note we make here.  If
2420       we discard it in distribute_notes, we will decrement it again.  */
2421
2422    if (i3dest_killed)
2423      {
2424	if (GET_CODE (i3dest_killed) == REG)
2425	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2426
2427	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2428	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2429					       NULL_RTX),
2430			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2431	else
2432	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2433					       NULL_RTX),
2434			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2435			    elim_i2, elim_i1);
2436      }
2437
2438    if (i2dest_in_i2src)
2439      {
2440	if (GET_CODE (i2dest) == REG)
2441	  REG_N_DEATHS (REGNO (i2dest))++;
2442
2443	if (newi2pat && reg_set_p (i2dest, newi2pat))
2444	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2445			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2446	else
2447	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2448			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2449			    NULL_RTX, NULL_RTX);
2450      }
2451
2452    if (i1dest_in_i1src)
2453      {
2454	if (GET_CODE (i1dest) == REG)
2455	  REG_N_DEATHS (REGNO (i1dest))++;
2456
2457	if (newi2pat && reg_set_p (i1dest, newi2pat))
2458	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2459			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2460	else
2461	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2462			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2463			    NULL_RTX, NULL_RTX);
2464      }
2465
2466    distribute_links (i3links);
2467    distribute_links (i2links);
2468    distribute_links (i1links);
2469
2470    if (GET_CODE (i2dest) == REG)
2471      {
2472	rtx link;
2473	rtx i2_insn = 0, i2_val = 0, set;
2474
2475	/* The insn that used to set this register doesn't exist, and
2476	   this life of the register may not exist either.  See if one of
2477	   I3's links points to an insn that sets I2DEST.  If it does,
2478	   that is now the last known value for I2DEST. If we don't update
2479	   this and I2 set the register to a value that depended on its old
2480	   contents, we will get confused.  If this insn is used, thing
2481	   contents, we will get confused.  If this insn is used, things
2482
2483	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2484	  if ((set = single_set (XEXP (link, 0))) != 0
2485	      && rtx_equal_p (i2dest, SET_DEST (set)))
2486	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2487
2488	record_value_for_reg (i2dest, i2_insn, i2_val);
2489
2490	/* If the reg formerly set in I2 died only once and that was in I3,
2491	   zero its use count so it won't make `reload' do any work.  */
2492	if (! added_sets_2
2493	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2494	    && ! i2dest_in_i2src)
2495	  {
2496	    regno = REGNO (i2dest);
2497	    REG_N_SETS (regno)--;
2498	    if (REG_N_SETS (regno) == 0
2499		&& ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2500	      REG_N_REFS (regno) = 0;
2501	  }
2502      }
2503
2504    if (i1 && GET_CODE (i1dest) == REG)
2505      {
2506	rtx link;
2507	rtx i1_insn = 0, i1_val = 0, set;
2508
2509	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2510	  if ((set = single_set (XEXP (link, 0))) != 0
2511	      && rtx_equal_p (i1dest, SET_DEST (set)))
2512	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2513
2514	record_value_for_reg (i1dest, i1_insn, i1_val);
2515
2516	regno = REGNO (i1dest);
2517	if (! added_sets_1 && ! i1dest_in_i1src)
2518	  {
2519	    REG_N_SETS (regno)--;
2520	    if (REG_N_SETS (regno) == 0
2521		&& ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2522	      REG_N_REFS (regno) = 0;
2523	  }
2524      }
2525
2526    /* Update reg_nonzero_bits et al for any changes that may have been made
2527       to this insn.  */
2528
2529    note_stores (newpat, set_nonzero_bits_and_sign_copies);
2530    if (newi2pat)
2531      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2532
2533    /* If we added any (clobber (scratch)), add them to the max for a
2534       block.  This is a very pessimistic calculation, since we might
2535       have had them already and this might not be the worst block, but
2536       it's not worth doing any better.  */
2537    max_scratch += i3_scratches + i2_scratches + other_scratches;
2538
2539    /* If I3 is now an unconditional jump, ensure that it has a
2540       BARRIER following it since it may have initially been a
2541       conditional jump.  It may also be the last nonnote insn.  */
2542
2543    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2544	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
2545	    || GET_CODE (temp) != BARRIER))
2546      emit_barrier_after (i3);
2547  }
2548
2549  combine_successes++;
2550
2551  /* Clear this here, so that subsequent get_last_value calls are not
2552     affected.  */
2553  subst_prev_insn = NULL_RTX;
2554
2555  if (added_links_insn
2556      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2557      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2558    return added_links_insn;
2559  else
2560    return newi2pat ? i2 : i3;
2561}
2562
2563/* Undo all the modifications recorded in undobuf.  */
2564
2565static void
2566undo_all ()
2567{
2568  struct undo *undo, *next;
2569
2570  for (undo = undobuf.undos; undo; undo = next)
2571    {
2572      next = undo->next;
2573      if (undo->is_int)
2574	*undo->where.i = undo->old_contents.i;
2575      else
2576	*undo->where.r = undo->old_contents.r;
2577
2578      undo->next = undobuf.frees;
2579      undobuf.frees = undo;
2580    }
2581
2582  obfree (undobuf.storage);
2583  undobuf.undos = undobuf.previous_undos = 0;
2584
2585  /* Clear this here, so that subsequent get_last_value calls are not
2586     affected.  */
2587  subst_prev_insn = NULL_RTX;
2588}
2589
2590/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2591   where we have an arithmetic expression and return that point.  LOC will
2592   be inside INSN.
2593
2594   try_combine will call this function to see if an insn can be split into
2595   two insns.  */
2596
2597static rtx *
2598find_split_point (loc, insn)
2599     rtx *loc;
2600     rtx insn;
2601{
2602  rtx x = *loc;
2603  enum rtx_code code = GET_CODE (x);
2604  rtx *split;
2605  int len = 0, pos, unsignedp;
2606  rtx inner;
2607
2608  /* First special-case some codes.  */
2609  switch (code)
2610    {
2611    case SUBREG:
2612#ifdef INSN_SCHEDULING
2613      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2614	 point.  */
2615      if (GET_CODE (SUBREG_REG (x)) == MEM)
2616	return loc;
2617#endif
2618      return find_split_point (&SUBREG_REG (x), insn);
2619
2620    case MEM:
2621#ifdef HAVE_lo_sum
2622      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2623	 using LO_SUM and HIGH.  */
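      /* E.g. (mem (symbol_ref "x")) is rewritten as
	 (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	 and the HIGH subexpression becomes the split point.  */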
2624      if (GET_CODE (XEXP (x, 0)) == CONST
2625	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2626	{
2627	  SUBST (XEXP (x, 0),
2628		 gen_rtx_combine (LO_SUM, Pmode,
2629				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2630				  XEXP (x, 0)));
2631	  return &XEXP (XEXP (x, 0), 0);
2632	}
2633#endif
2634
2635      /* If we have a PLUS whose second operand is a constant and the
2636	 address is not valid, perhaps we can split it up using
2637	 the machine-specific way to split large constants.  We use
2638	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2639	 it will not remain in the result.  */
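      /* For instance (with invented constants), if the address is
	 (plus (reg) (const_int 0x12345678)) and that offset is too large
	 to be valid, the machine-specific splitter may build the constant
	 in two steps through the placeholder; we then splice the two
	 sources back together and split between them.  */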
2640      if (GET_CODE (XEXP (x, 0)) == PLUS
2641	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2642	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2643	{
2644	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2645	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2646				 subst_insn);
2647
2648	  /* This should have produced two insns, each of which sets our
2649	     placeholder.  If the source of the second is a valid address,
2650	     we can put both sources together and make a split point
2651	     in the middle.  */
2652
2653	  if (seq && XVECLEN (seq, 0) == 2
2654	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2655	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2656	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2657	      && ! reg_mentioned_p (reg,
2658				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2659	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2660	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2661	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2662	      && memory_address_p (GET_MODE (x),
2663				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2664	    {
2665	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2666	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2667
2668	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2669		 find where in SRC2 it was placed, that can become our
2670		 split point and we can replace this address with SRC2.
2671		 Just try two obvious places.  */
2672
2673	      src2 = replace_rtx (src2, reg, src1);
2674	      split = 0;
2675	      if (XEXP (src2, 0) == src1)
2676		split = &XEXP (src2, 0);
2677	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2678		       && XEXP (XEXP (src2, 0), 0) == src1)
2679		split = &XEXP (XEXP (src2, 0), 0);
2680
2681	      if (split)
2682		{
2683		  SUBST (XEXP (x, 0), src2);
2684		  return split;
2685		}
2686	    }
2687
2688	  /* If that didn't work, perhaps the first operand is complex and
2689	     needs to be computed separately, so make a split point there.
2690	     This will occur on machines that just support REG + CONST
2691	     and have a constant moved through some previous computation.  */
2692
2693	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2694		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2695			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2696			     == 'o')))
2697	    return &XEXP (XEXP (x, 0), 0);
2698	}
2699      break;
2700
2701    case SET:
2702#ifdef HAVE_cc0
2703      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2704	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2705	 we need to put the operand into a register.  So split at that
2706	 point.  */
2707
2708      if (SET_DEST (x) == cc0_rtx
2709	  && GET_CODE (SET_SRC (x)) != COMPARE
2710	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2711	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2712	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
2713		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2714	return &SET_SRC (x);
2715#endif
2716
2717      /* See if we can split SET_SRC as it stands.  */
2718      split = find_split_point (&SET_SRC (x), insn);
2719      if (split && split != &SET_SRC (x))
2720	return split;
2721
2722      /* See if we can split SET_DEST as it stands.  */
2723      split = find_split_point (&SET_DEST (x), insn);
2724      if (split && split != &SET_DEST (x))
2725	return split;
2726
2727      /* See if this is a bitfield assignment with everything constant.  If
2728	 so, this is an IOR of an AND, so split it into that.  */
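      /* For example, storing the constant 5 into a 4-bit field at bit 2
	 of DEST has MASK == 15, POS == 2, SRC == 5 and becomes

	    (ior (and DEST (const_int -61)) (const_int 20))

	 since ~(15 << 2) is -61 (before mode masking) and 5 << 2 is 20.
	 When SRC == MASK the AND is unnecessary and we emit just
	 (ior DEST (const_int 60)).  */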
2729      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2730	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2731	      <= HOST_BITS_PER_WIDE_INT)
2732	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2733	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2734	  && GET_CODE (SET_SRC (x)) == CONST_INT
2735	  && ((INTVAL (XEXP (SET_DEST (x), 1))
2736	      + INTVAL (XEXP (SET_DEST (x), 2)))
2737	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2738	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2739	{
2740	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
2741	  int len = INTVAL (XEXP (SET_DEST (x), 1));
2742	  int src = INTVAL (SET_SRC (x));
2743	  rtx dest = XEXP (SET_DEST (x), 0);
2744	  enum machine_mode mode = GET_MODE (dest);
2745	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2746
2747	  if (BITS_BIG_ENDIAN)
2748	    pos = GET_MODE_BITSIZE (mode) - len - pos;
2749
2750	  if (src == mask)
2751	    SUBST (SET_SRC (x),
2752		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2753	  else
2754	    SUBST (SET_SRC (x),
2755		   gen_binary (IOR, mode,
2756			       gen_binary (AND, mode, dest,
2757					   GEN_INT (~ (mask << pos)
2758						    & GET_MODE_MASK (mode))),
2759			       GEN_INT (src << pos)));
2760
2761	  SUBST (SET_DEST (x), dest);
2762
2763	  split = find_split_point (&SET_SRC (x), insn);
2764	  if (split && split != &SET_SRC (x))
2765	    return split;
2766	}
2767
2768      /* Otherwise, see if this is an operation that we can split into two.
2769	 If so, try to split that.  */
2770      code = GET_CODE (SET_SRC (x));
2771
2772      switch (code)
2773	{
2774	case AND:
2775	  /* If we are AND'ing with a large constant that is only a single
2776	     bit and the result is only being used in a context where we
2777	     need to know if it is zero or non-zero, replace it with a bit
2778	     extraction.  This will avoid the large constant, which might
2779	     have taken more than one insn to make.  If the constant were
2780	     not a valid argument to the AND but took only one insn to make,
2781	     this is no worse, but if it took more than one insn, it will
2782	     be better.  */
2783
2784	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2785	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2786	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2787	      && GET_CODE (SET_DEST (x)) == REG
2788	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2789	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2790	      && XEXP (*split, 0) == SET_DEST (x)
2791	      && XEXP (*split, 1) == const0_rtx)
2792	    {
2793	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2794						XEXP (SET_SRC (x), 0),
2795						pos, NULL_RTX, 1, 1, 0, 0);
2796	      if (extraction != 0)
2797		{
2798		  SUBST (SET_SRC (x), extraction);
2799		  return find_split_point (loc, insn);
2800		}
2801	    }
2802	  break;
2803
2804	case NE:
2805	  /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one bit
2806	     of X is known to be on, this can be converted into a NEG of a shift.  */
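	  /* E.g. if only bit 3 of X can be nonzero, (ne:SI X (const_int 0))
	     becomes (neg:SI (lshiftrt:SI X (const_int 3))), which is -1
	     when that bit is set and 0 otherwise, matching
	     STORE_FLAG_VALUE == -1.  */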
2807	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2808	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
2809	      && 1 <= (pos = exact_log2
2810		       (nonzero_bits (XEXP (SET_SRC (x), 0),
2811				      GET_MODE (XEXP (SET_SRC (x), 0))))))
2812	    {
2813	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2814
2815	      SUBST (SET_SRC (x),
2816		     gen_rtx_combine (NEG, mode,
2817				      gen_rtx_combine (LSHIFTRT, mode,
2818						       XEXP (SET_SRC (x), 0),
2819						       GEN_INT (pos))));
2820
2821	      split = find_split_point (&SET_SRC (x), insn);
2822	      if (split && split != &SET_SRC (x))
2823		return split;
2824	    }
2825	  break;
2826
2827	case SIGN_EXTEND:
2828	  inner = XEXP (SET_SRC (x), 0);
2829
2830	  /* We can't optimize if either mode is a partial integer
2831	     mode as we don't know how many bits are significant
2832	     in those modes.  */
2833	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2834	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2835	    break;
2836
2837	  pos = 0;
2838	  len = GET_MODE_BITSIZE (GET_MODE (inner));
2839	  unsignedp = 0;
2840	  break;
2841
2842	case SIGN_EXTRACT:
2843	case ZERO_EXTRACT:
2844	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2845	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2846	    {
2847	      inner = XEXP (SET_SRC (x), 0);
2848	      len = INTVAL (XEXP (SET_SRC (x), 1));
2849	      pos = INTVAL (XEXP (SET_SRC (x), 2));
2850
2851	      if (BITS_BIG_ENDIAN)
2852		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2853	      unsignedp = (code == ZERO_EXTRACT);
2854	    }
2855	  break;
2856
2857	default:
2858	  break;
2859	}
2860
2861      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2862	{
2863	  enum machine_mode mode = GET_MODE (SET_SRC (x));
2864
2865	  /* For unsigned, we have a choice of a shift followed by an
2866	     AND or two shifts.  Use two shifts for field sizes where the
2867	     constant might be too large.  We assume here that we can
2868	     always at least get 8-bit constants in an AND insn, which is
2869	     true for every current RISC.  */
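	  /* E.g. an unsigned 6-bit field at bit 4 of X in SImode becomes
	     (and:SI (lshiftrt:SI X (const_int 4)) (const_int 63)), while
	     a signed field of the same shape uses two shifts:
	     (ashiftrt:SI (ashift:SI X (const_int 22)) (const_int 26)).  */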
2870
2871	  if (unsignedp && len <= 8)
2872	    {
2873	      SUBST (SET_SRC (x),
2874		     gen_rtx_combine
2875		     (AND, mode,
2876		      gen_rtx_combine (LSHIFTRT, mode,
2877				       gen_lowpart_for_combine (mode, inner),
2878				       GEN_INT (pos)),
2879		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2880
2881	      split = find_split_point (&SET_SRC (x), insn);
2882	      if (split && split != &SET_SRC (x))
2883		return split;
2884	    }
2885	  else
2886	    {
2887	      SUBST (SET_SRC (x),
2888		     gen_rtx_combine
2889		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2890		      gen_rtx_combine (ASHIFT, mode,
2891				       gen_lowpart_for_combine (mode, inner),
2892				       GEN_INT (GET_MODE_BITSIZE (mode)
2893						- len - pos)),
2894		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2895
2896	      split = find_split_point (&SET_SRC (x), insn);
2897	      if (split && split != &SET_SRC (x))
2898		return split;
2899	    }
2900	}
2901
2902      /* See if this is a simple operation with a constant as the second
2903	 operand.  It might be that this constant is out of range and hence
2904	 could be used as a split point.  */
2905      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2906	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2907	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2908	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
2909	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2910	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2911		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2912		      == 'o'))))
2913	return &XEXP (SET_SRC (x), 1);
2914
2915      /* Finally, see if this is a simple operation with its first operand
2916	 not in a register.  The operation might require this operand in a
2917	 register, so return it as a split point.  We can always do this
2918	 because if the first operand were another operation, we would have
2919	 already found it as a split point.  */
2920      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2921	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2922	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2923	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2924	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2925	return &XEXP (SET_SRC (x), 0);
2926
2927      return 0;
2928
2929    case AND:
2930    case IOR:
2931	      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR
2932	 insn it is better written as (not (ior A B)) so we can split it.
2933	 Similarly, NAND as (ior (not A) (not B)) becomes (not (and A B)).  */
2934      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2935	{
2936	  SUBST (*loc,
2937		 gen_rtx_combine (NOT, GET_MODE (x),
2938				  gen_rtx_combine (code == IOR ? AND : IOR,
2939						   GET_MODE (x),
2940						   XEXP (XEXP (x, 0), 0),
2941						   XEXP (XEXP (x, 1), 0))));
2942	  return find_split_point (loc, insn);
2943	}
2944
2945      /* Many RISC machines have a large set of logical insns.  If the
2946	 second operand is a NOT, put it first so we will try to split the
2947	 other operand first.  */
2948      if (GET_CODE (XEXP (x, 1)) == NOT)
2949	{
2950	  rtx tem = XEXP (x, 0);
2951	  SUBST (XEXP (x, 0), XEXP (x, 1));
2952	  SUBST (XEXP (x, 1), tem);
2953	}
2954      break;
2955
2956    default:
2957      break;
2958    }
2959
2960  /* Otherwise, select our actions depending on our rtx class.  */
2961  switch (GET_RTX_CLASS (code))
2962    {
2963    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
2964    case '3':
2965      split = find_split_point (&XEXP (x, 2), insn);
2966      if (split)
2967	return split;
2968      /* ... fall through ...  */
2969    case '2':
2970    case 'c':
2971    case '<':
2972      split = find_split_point (&XEXP (x, 1), insn);
2973      if (split)
2974	return split;
2975      /* ... fall through ...  */
2976    case '1':
2977      /* Some machines have (and (shift ...) ...) insns.  If X is not
2978	 an AND, but XEXP (X, 0) is, use it as our split point.  */
2979      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2980	return &XEXP (x, 0);
2981
2982      split = find_split_point (&XEXP (x, 0), insn);
2983      if (split)
2984	return split;
2985      return loc;
2986    }
2987
2988  /* Otherwise, we don't have a split point.  */
2989  return 0;
2990}
2991
2992/* Throughout X, replace FROM with TO, and return the result.
2993   The result is TO if X is FROM;
2994   otherwise the result is X, but its contents may have been modified.
2995   If they were modified, a record was made in undobuf so that
2996   undo_all will (among other things) return X to its original state.
2997
2998	   If the changes necessary are too numerous to record for undoing,
2999	   the excess changes are not made, so the result is invalid.
3000	   The changes already made can still be undone.
3001	   undobuf.num_undo is incremented for such changes, so by testing that,
3002	   the caller can tell whether the result is valid.
3003
3004   `n_occurrences' is incremented each time FROM is replaced.
3005
3006   IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3007
3008   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
3009   by copying if `n_occurrences' is non-zero.  */
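
/* As an illustrative sketch (the register numbers are hypothetical):
   when combining
       (set (reg 65) (plus (reg 64) (const_int 1)))
   into
       (set (reg 66) (mult (reg 65) (const_int 4))),
   subst is applied to the second pattern with FROM = (reg 65) and
   TO = (plus (reg 64) (const_int 1)), producing
       (set (reg 66) (mult (plus (reg 64) (const_int 1)) (const_int 4))).  */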
3010
3011static rtx
3012subst (x, from, to, in_dest, unique_copy)
3013     register rtx x, from, to;
3014     int in_dest;
3015     int unique_copy;
3016{
3017  register enum rtx_code code = GET_CODE (x);
3018  enum machine_mode op0_mode = VOIDmode;
3019  register char *fmt;
3020  register int len, i;
3021  rtx new;
3022
3023/* Two expressions are equal if they are identical copies of a shared
3024   RTX or if they are both registers with the same register number
3025   and mode.  */
3026
3027#define COMBINE_RTX_EQUAL_P(X,Y)			\
3028  ((X) == (Y)						\
3029   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3030       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3031
3032  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3033    {
3034      n_occurrences++;
3035      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3036    }
3037
3038  /* If X and FROM are the same register but different modes, they will
3039     not have been seen as equal above.  However, flow.c will make a
3040     LOG_LINKS entry for that case.  If we do nothing, we will try to
3041     rerecognize our original insn and, when it succeeds, we will
3042     delete the feeding insn, which is incorrect.
3043
3044     So force this insn not to match in this (rare) case.  */
3045  if (! in_dest && code == REG && GET_CODE (from) == REG
3046      && REGNO (x) == REGNO (from))
3047    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3048
3049  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3050     of which may contain things that can be combined.  */
3051  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3052    return x;
3053
3054  /* It is possible to have a subexpression appear twice in the insn.
3055     Suppose that FROM is a register that appears within TO.
3056     Then, after that subexpression has been scanned once by `subst',
3057     the second time it is scanned, TO may be found.  If we were
3058     to scan TO here, we would find FROM within it and create a
3059	     self-referential RTL structure, which is completely wrong.  */
3060  if (COMBINE_RTX_EQUAL_P (x, to))
3061    return to;
3062
3063  /* Parallel asm_operands need special attention because all of the
3064     inputs are shared across the arms.  Furthermore, unsharing the
3065     rtl results in recognition failures.  Failure to handle this case
3066     specially can result in circular rtl.
3067
3068     Solve this by doing a normal pass across the first entry of the
3069     parallel, and only processing the SET_DESTs of the subsequent
3070     entries.  Ug.  */
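
  /* As an illustration, such a PARALLEL has the shape
	 (parallel [(set (reg A) (asm_operands ...))
		    (set (reg B) (asm_operands ...))])
     where both ASM_OPERANDS rtxes share one vector of input operands.  */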
3071
3072  if (code == PARALLEL
3073      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3074      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3075    {
3076      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3077
3078      /* If this substitution failed, this whole thing fails.  */
3079      if (GET_CODE (new) == CLOBBER
3080	  && XEXP (new, 0) == const0_rtx)
3081	return new;
3082
3083      SUBST (XVECEXP (x, 0, 0), new);
3084
3085      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3086	{
3087	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3088
3089	  if (GET_CODE (dest) != REG
3090	      && GET_CODE (dest) != CC0
3091	      && GET_CODE (dest) != PC)
3092	    {
3093	      new = subst (dest, from, to, 0, unique_copy);
3094
3095	      /* If this substitution failed, this whole thing fails.  */
3096	      if (GET_CODE (new) == CLOBBER
3097		  && XEXP (new, 0) == const0_rtx)
3098		return new;
3099
3100	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3101	    }
3102	}
3103    }
3104  else
3105    {
3106      len = GET_RTX_LENGTH (code);
3107      fmt = GET_RTX_FORMAT (code);
3108
3109      /* We don't need to process a SET_DEST that is a register, CC0,
3110	 or PC, so set up to skip this common case.  All other cases
3111	 where we want to suppress replacing something inside a
3112	 SET_SRC are handled via the IN_DEST operand.  */
3113      if (code == SET
3114	  && (GET_CODE (SET_DEST (x)) == REG
3115	      || GET_CODE (SET_DEST (x)) == CC0
3116	      || GET_CODE (SET_DEST (x)) == PC))
3117	fmt = "ie";
3118
3119      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3120	 constant.  */
3121      if (fmt[0] == 'e')
3122	op0_mode = GET_MODE (XEXP (x, 0));
3123
3124      for (i = 0; i < len; i++)
3125	{
3126	  if (fmt[i] == 'E')
3127	    {
3128	      register int j;
3129	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3130		{
3131		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3132		    {
3133		      new = (unique_copy && n_occurrences
3134			     ? copy_rtx (to) : to);
3135		      n_occurrences++;
3136		    }
3137		  else
3138		    {
3139		      new = subst (XVECEXP (x, i, j), from, to, 0,
3140				   unique_copy);
3141
3142		      /* If this substitution failed, this whole thing
3143			 fails.  */
3144		      if (GET_CODE (new) == CLOBBER
3145			  && XEXP (new, 0) == const0_rtx)
3146			return new;
3147		    }
3148
3149		  SUBST (XVECEXP (x, i, j), new);
3150		}
3151	    }
3152	  else if (fmt[i] == 'e')
3153	    {
3154	      if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3155		{
3156		  /* In general, don't install a subreg involving two
3157		     modes that are not tieable.  It can worsen register
3158		     allocation, and can even create invalid reload
3159		     insns, since the reg inside may need to be copied
3160		     in the outside mode, and that copy may be invalid
3161		     if it is an fp reg copied in integer mode.
3162
3163		     We allow two exceptions to this: it is valid if the
3164		     SUBREG is inside another SUBREG and the mode of that
3165		     SUBREG and the mode of the inside of TO are tieable,
3166		     and it is valid if X is a SET that copies FROM
3167		     to CC0.  */
3168
3169		  if (GET_CODE (to) == SUBREG
3170		      && ! MODES_TIEABLE_P (GET_MODE (to),
3171					    GET_MODE (SUBREG_REG (to)))
3172		      && ! (code == SUBREG
3173			    && MODES_TIEABLE_P (GET_MODE (x),
3174						GET_MODE (SUBREG_REG (to))))
3175#ifdef HAVE_cc0
3176		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3177#endif
3178		      )
3179		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3180
3181		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3182		  n_occurrences++;
3183		}
3184	      else
3185		/* If we are in a SET_DEST, suppress most cases unless we
3186		   have gone inside a MEM, in which case we want to
3187		   simplify the address.  We assume here that things that
3188		   are actually part of the destination have their inner
3189		   parts in the first expression.  This is true for SUBREG,
3190		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3191		   things aside from REG and MEM that should appear in a
3192		   SET_DEST.  */
3193		new = subst (XEXP (x, i), from, to,
3194			     (((in_dest
3195				&& (code == SUBREG || code == STRICT_LOW_PART
3196				    || code == ZERO_EXTRACT))
3197			       || code == SET)
3198			      && i == 0), unique_copy);
3199
3200	      /* If we found that we will have to reject this combination,
3201		 indicate that by returning the CLOBBER ourselves, rather than
3202		 an expression containing it.  This will speed things up as
3203		 well as prevent accidents where two CLOBBERs are considered
3204		 to be equal, thus producing an incorrect simplification.  */
3205
3206	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3207		return new;
3208
3209	      SUBST (XEXP (x, i), new);
3210	    }
3211	}
3212    }
3213
3214  /* Try to simplify X.  If the simplification changed the code, it is likely
3215     that further simplification will help, so loop, but limit the number
3216     of repetitions that will be performed.  */
3217
3218  for (i = 0; i < 4; i++)
3219    {
3220      /* If X is sufficiently simple, don't bother trying to do anything
3221	 with it.  */
3222      if (code != CONST_INT && code != REG && code != CLOBBER)
3223	x = simplify_rtx (x, op0_mode, i == 3, in_dest);
3224
3225      if (GET_CODE (x) == code)
3226	break;
3227
3228      code = GET_CODE (x);
3229
3230	      /* We no longer know the original mode of operand 0 since we
3231	 have changed the form of X.  */
3232      op0_mode = VOIDmode;
3233    }
3234
3235  return x;
3236}
3237
3238/* Simplify X, a piece of RTL.  We just operate on the expression at the
3239   outer level; call `subst' to simplify recursively.  Return the new
3240   expression.
3241
3242	   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3243	   is the last iteration, even if an expression with a code different from
3244	   that of X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */
3245
3246static rtx
3247simplify_rtx (x, op0_mode, last, in_dest)
3248     rtx x;
3249     enum machine_mode op0_mode;
3250     int last;
3251     int in_dest;
3252{
3253  enum rtx_code code = GET_CODE (x);
3254  enum machine_mode mode = GET_MODE (x);
3255  rtx temp;
3256  int i;
3257
3258  /* If this is a commutative operation, put a constant last and a complex
3259     expression first.  We don't need to do this for comparisons here.  */
3260  if (GET_RTX_CLASS (code) == 'c'
3261      && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3262	  || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3263	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3264	  || (GET_CODE (XEXP (x, 0)) == SUBREG
3265	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3266	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3267    {
3268      temp = XEXP (x, 0);
3269      SUBST (XEXP (x, 0), XEXP (x, 1));
3270      SUBST (XEXP (x, 1), temp);
3271    }
3272
3273	  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3274	     sign extension of a PLUS with a constant, reverse the order of the sign
3275	     extension and the addition.  Note that this is not the same as the
3276	     original code, but overflow is undefined for signed values.  Also note
3277	     that the PLUS will have been partially moved "inside" the
3278	     sign-extension, so that the first operand of X will really look like:
3279	         (ashiftrt (plus (ashift A C4) C5) C4).
3280	     We convert this to
3281	         (plus (ashiftrt (ashift A C4) C4) C5'),
3282	     where C5' is (ashiftrt C5 C4), and replace the first operand of X
3283	     with that expression.  Later parts of this function may simplify
3284	     the expression further.
3285
3286	     For example, if we start with (mult (sign_extend (plus A C1)) C2),
3287	     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
3288	     distributive law to produce (plus (mult (sign_extend A) C2) C3).
3289
3289	     We do this to simplify address expressions.  */
3290
3291  if ((code == PLUS || code == MINUS || code == MULT)
3292      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3293      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3294      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3295      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3296      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3297      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3298      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3299      && (temp = simplify_binary_operation (ASHIFTRT, mode,
3300					    XEXP (XEXP (XEXP (x, 0), 0), 1),
3301					    XEXP (XEXP (x, 0), 1))) != 0)
3302    {
3303      rtx new
3304	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
3305				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3306				INTVAL (XEXP (XEXP (x, 0), 1)));
3307
3308      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3309				  INTVAL (XEXP (XEXP (x, 0), 1)));
3310
3311      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3312    }
3313
3314  /* If this is a simple operation applied to an IF_THEN_ELSE, try
3315     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
3316     things.  Check for cases where both arms are testing the same
3317     condition.
3318
3319     Don't do anything if all operands are very simple.  */
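
  /* For example, (plus (if_then_else COND A B) C) can become
     (if_then_else COND (plus A C) (plus B C)), after which each arm
     may simplify independently.  */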
3320
3321  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3322	|| GET_RTX_CLASS (code) == '<')
3323       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3324	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3325		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3326		      == 'o')))
3327	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3328	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3329		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3330			 == 'o')))))
3331      || (GET_RTX_CLASS (code) == '1'
3332	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3333	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3334		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3335			 == 'o'))))))
3336    {
3337      rtx cond, true, false;
3338
3339      cond = if_then_else_cond (x, &true, &false);
3340      if (cond != 0
3341	  /* If everything is a comparison, what we have is highly unlikely
3342	     to be simpler, so don't use it.  */
3343	  && ! (GET_RTX_CLASS (code) == '<'
3344		&& (GET_RTX_CLASS (GET_CODE (true)) == '<'
3345		    || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3346	{
3347	  rtx cop1 = const0_rtx;
3348	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3349
3350	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3351	    return x;
3352
3353	  /* Simplify the alternative arms; this may collapse the true and
3354	     false arms to store-flag values.  */
3355	  true = subst (true, pc_rtx, pc_rtx, 0, 0);
3356	  false = subst (false, pc_rtx, pc_rtx, 0, 0);
3357
3358	  /* Restarting if we generate a store-flag expression will cause
3359	     us to loop.  Just drop through in this case.  */
3360
3361	  /* If the result values are STORE_FLAG_VALUE and zero, we can
3362	     just make the comparison operation.  */
3363	  if (true == const_true_rtx && false == const0_rtx)
3364	    x = gen_binary (cond_code, mode, cond, cop1);
3365	  else if (true == const0_rtx && false == const_true_rtx)
3366	    x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3367
3368	  /* Likewise, we can make the negate of a comparison operation
3369	     if the result values are - STORE_FLAG_VALUE and zero.  */
3370	  else if (GET_CODE (true) == CONST_INT
3371		   && INTVAL (true) == - STORE_FLAG_VALUE
3372		   && false == const0_rtx)
3373	    x = gen_unary (NEG, mode, mode,
3374			   gen_binary (cond_code, mode, cond, cop1));
3375	  else if (GET_CODE (false) == CONST_INT
3376		   && INTVAL (false) == - STORE_FLAG_VALUE
3377		   && true == const0_rtx)
3378	    x = gen_unary (NEG, mode, mode,
3379			   gen_binary (reverse_condition (cond_code),
3380				       mode, cond, cop1));
3381	  else
3382	    return gen_rtx_IF_THEN_ELSE (mode,
3383					 gen_binary (cond_code, VOIDmode,
3384						     cond, cop1),
3385					 true, false);
3386
3387	  code = GET_CODE (x);
3388	  op0_mode = VOIDmode;
3389	}
3390    }
3391
3392  /* Try to fold this expression in case we have constants that weren't
3393     present before.  */
3394  temp = 0;
3395  switch (GET_RTX_CLASS (code))
3396    {
3397    case '1':
3398      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3399      break;
3400    case '<':
3401      temp = simplify_relational_operation (code, op0_mode,
3402					    XEXP (x, 0), XEXP (x, 1));
3403#ifdef FLOAT_STORE_FLAG_VALUE
3404      if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3405	temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3406		: immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3407#endif
3408      break;
3409    case 'c':
3410    case '2':
3411      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3412      break;
3413    case 'b':
3414    case '3':
3415      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3416					 XEXP (x, 1), XEXP (x, 2));
3417      break;
3418    }
3419
3420  if (temp)
3421    x = temp, code = GET_CODE (temp);
3422
3423  /* First see if we can apply the inverse distributive law.  */
3424  if (code == PLUS || code == MINUS
3425      || code == AND || code == IOR || code == XOR)
3426    {
3427      x = apply_distributive_law (x);
3428      code = GET_CODE (x);
3429    }
3430
3431  /* If CODE is an associative operation not otherwise handled, see if we
3432     can associate some operands.  This can win if they are constants or
3433	     if they are logically related (i.e. (a & b) & a).  */
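  /* For example, (plus (plus X (const_int 3)) (const_int 5)) reassociates
     to (plus X (const_int 8)); and (and (and A B) A) becomes (and B A),
     since the inner (and A A) folds to A.  */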
3434  if ((code == PLUS || code == MINUS
3435       || code == MULT || code == AND || code == IOR || code == XOR
3436       || code == DIV || code == UDIV
3437       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3438      && INTEGRAL_MODE_P (mode))
3439    {
3440      if (GET_CODE (XEXP (x, 0)) == code)
3441	{
3442	  rtx other = XEXP (XEXP (x, 0), 0);
3443	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3444	  rtx inner_op1 = XEXP (x, 1);
3445	  rtx inner;
3446
3447	  /* Make sure we pass the constant operand if any as the second
3448	     one if this is a commutative operation.  */
3449	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3450	    {
3451	      rtx tem = inner_op0;
3452	      inner_op0 = inner_op1;
3453	      inner_op1 = tem;
3454	    }
3455	  inner = simplify_binary_operation (code == MINUS ? PLUS
3456					     : code == DIV ? MULT
3457					     : code == UDIV ? MULT
3458					     : code,
3459					     mode, inner_op0, inner_op1);
3460
3461	  /* For commutative operations, try the other pair if that one
3462	     didn't simplify.  */
3463	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3464	    {
3465	      other = XEXP (XEXP (x, 0), 1);
3466	      inner = simplify_binary_operation (code, mode,
3467						 XEXP (XEXP (x, 0), 0),
3468						 XEXP (x, 1));
3469	    }
3470
3471	  if (inner)
3472	    return gen_binary (code, mode, other, inner);
3473	}
3474    }
3475
3476  /* A little bit of algebraic simplification here.  */
3477  switch (code)
3478    {
3479    case MEM:
3480      /* Ensure that our address has any ASHIFTs converted to MULT in case
3481	 address-recognizing predicates are called later.  */
3482      temp = make_compound_operation (XEXP (x, 0), MEM);
3483      SUBST (XEXP (x, 0), temp);
3484      break;
3485
3486    case SUBREG:
3487      /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3488	 is paradoxical.  If we can't do that safely, then it becomes
3489	 something nonsensical so that this combination won't take place.  */
3490
3491      if (GET_CODE (SUBREG_REG (x)) == MEM
3492	  && (GET_MODE_SIZE (mode)
3493	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3494	{
3495	  rtx inner = SUBREG_REG (x);
3496	  int endian_offset = 0;
3497	  /* Don't change the mode of the MEM
3498	     if that would change the meaning of the address.  */
3499	  if (MEM_VOLATILE_P (SUBREG_REG (x))
3500	      || mode_dependent_address_p (XEXP (inner, 0)))
3501	    return gen_rtx_CLOBBER (mode, const0_rtx);
3502
3503	  if (BYTES_BIG_ENDIAN)
3504	    {
3505	      if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3506		endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3507	      if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3508		endian_offset -= (UNITS_PER_WORD
3509				  - GET_MODE_SIZE (GET_MODE (inner)));
3510	    }
3511	  /* Note if the plus_constant doesn't make a valid address
3512	     then this combination won't be accepted.  */
3513	  x = gen_rtx_MEM (mode,
3514			   plus_constant (XEXP (inner, 0),
3515					  (SUBREG_WORD (x) * UNITS_PER_WORD
3516					   + endian_offset)));
3517	  MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3518	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3519	  MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3520	  return x;
3521	}
3522
3523      /* If we are in a SET_DEST, these other cases can't apply.  */
3524      if (in_dest)
3525	return x;
3526
3527      /* Changing mode twice with SUBREG => just change it once,
3528	 or not at all if changing back to starting mode.  */
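      /* For example, (subreg:QI (subreg:HI (reg:SI R) 0) 0) collapses
	 to (subreg:QI (reg:SI R) 0).  */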
3529      if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3530	{
3531	  if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3532	      && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3533	    return SUBREG_REG (SUBREG_REG (x));
3534
3535	  SUBST_INT (SUBREG_WORD (x),
3536		     SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3537	  SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3538	}
3539
3540      /* SUBREG of a hard register => just change the register number
3541	 and/or mode.  If the hard register is not valid in that mode,
3542	 suppress this combination.  If the hard register is the stack,
3543	 frame, or argument pointer, leave this as a SUBREG.  */
3544
3545      if (GET_CODE (SUBREG_REG (x)) == REG
3546	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3547	  && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3548#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3549	  && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3550#endif
3551#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3552	  && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3553#endif
3554	  && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3555	{
3556	  if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3557				  mode))
3558	    return gen_rtx_REG (mode,
3559				REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3560	  else
3561	    return gen_rtx_CLOBBER (mode, const0_rtx);
3562	}
3563
3564      /* For a constant, try to pick up the part we want.  Handle a full
3565	 word and low-order part.  Only do this if we are narrowing
3566	 the constant; if it is being widened, we have no idea what
3567	 the extra bits will have been set to.  */
3568
3569      if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3570	  && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3571	  && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3572	  && GET_MODE_CLASS (mode) == MODE_INT)
3573	{
3574	  temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3575				  0, op0_mode);
3576	  if (temp)
3577	    return temp;
3578	}
3579
3580      /* If we want a subreg of a constant, at offset 0,
3581	 take the low bits.  On a little-endian machine, that's
3582	 always valid.  On a big-endian machine, it's valid
3583	 only if the constant's mode fits in one word.   Note that we
3584	 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode.  */
3585      if (CONSTANT_P (SUBREG_REG (x))
3586	  && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3587	      || ! WORDS_BIG_ENDIAN)
3588	      ? SUBREG_WORD (x) == 0
3589	      : (SUBREG_WORD (x)
3590		 == ((GET_MODE_SIZE (op0_mode)
3591		      - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3592		     / UNITS_PER_WORD)))
3593	  && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3594	  && (! WORDS_BIG_ENDIAN
3595	      || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3596	return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3597
3598      /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3599	 since we are saying that the high bits don't matter.  */
3600      if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3601	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3602	return SUBREG_REG (x);
3603
3604      /* Note that we cannot do any narrowing for non-constants since
3605	 we might have been counting on using the fact that some bits were
3606	 zero.  We now do this in the SET.  */
3607
3608      break;
3609
3610    case NOT:
3611      /* (not (plus X -1)) can become (neg X).  */
3612      if (GET_CODE (XEXP (x, 0)) == PLUS
3613	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3614	return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3615
3616      /* Similarly, (not (neg X)) is (plus X -1).  */
3617      if (GET_CODE (XEXP (x, 0)) == NEG)
3618	return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3619				constm1_rtx);
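
      /* Both identities follow from the two's-complement rule that
	 (not Y) equals (minus (neg Y) 1): taking Y = (plus X -1) yields
	 (neg X), and taking Y = (neg X) yields (plus X -1).  */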
3620
3621      /* (not (xor X C)) for C constant is (xor X D) with D = ~ C.  */
3622      if (GET_CODE (XEXP (x, 0)) == XOR
3623	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3624	  && (temp = simplify_unary_operation (NOT, mode,
3625					       XEXP (XEXP (x, 0), 1),
3626					       mode)) != 0)
3627	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3628
3629      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
3630	 other than 1, but that is not valid.  We could do a similar
3631	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3632	 but this doesn't seem common enough to bother with.  */
3633      if (GET_CODE (XEXP (x, 0)) == ASHIFT
3634	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
3635	return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3636			       XEXP (XEXP (x, 0), 1));
3637
3638      if (GET_CODE (XEXP (x, 0)) == SUBREG
3639	  && subreg_lowpart_p (XEXP (x, 0))
3640	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3641	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3642	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3643	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3644	{
3645	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3646
3647	  x = gen_rtx_ROTATE (inner_mode,
3648			      gen_unary (NOT, inner_mode, inner_mode,
3649					 const1_rtx),
3650			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3651	  return gen_lowpart_for_combine (mode, x);
3652	}
3653
3654      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3655	 reversing the comparison code if valid.  */
3656      if (STORE_FLAG_VALUE == -1
3657	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3658	  && reversible_comparison_p (XEXP (x, 0)))
3659	return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3660				mode, XEXP (XEXP (x, 0), 0),
3661				XEXP (XEXP (x, 0), 1));
3662
3663      /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3664	 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3665	 perform the above simplification.  */
3666
3667      if (STORE_FLAG_VALUE == -1
3669	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3670	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3671	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3672	return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3673
3674      /* Apply De Morgan's laws to reduce number of patterns for machines
3675 	 with negating logical insns (and-not, nand, etc.).  If result has
3676 	 only one NOT, put it first, since that is how the patterns are
3677 	 coded.  */
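
      /* For example, (not (and A (not B))) becomes (ior (not A) B),
	 which matches an or-not insn directly.  */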
3678
3679      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3680 	{
3681 	 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3682
3683	 if (GET_CODE (in1) == NOT)
3684	   in1 = XEXP (in1, 0);
3685 	 else
3686	   in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3687
3688	 if (GET_CODE (in2) == NOT)
3689	   in2 = XEXP (in2, 0);
3690 	 else if (GET_CODE (in2) == CONST_INT
3691		  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3692	   in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3693	 else
3694	   in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3695
3696	 if (GET_CODE (in2) == NOT)
3697	   {
3698	     rtx tem = in2;
3699	     in2 = in1; in1 = tem;
3700	   }
3701
3702	 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3703				 mode, in1, in2);
3704       }
3705      break;
3706
3707    case NEG:
3708      /* (neg (plus X 1)) can become (not X).  */
3709      if (GET_CODE (XEXP (x, 0)) == PLUS
3710	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
3711	return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3712
3713      /* Similarly, (neg (not X)) is (plus X 1).  */
3714      if (GET_CODE (XEXP (x, 0)) == NOT)
3715	return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3716
3717      /* (neg (minus X Y)) can become (minus Y X).  */
3718      if (GET_CODE (XEXP (x, 0)) == MINUS
3719	  && (! FLOAT_MODE_P (mode)
3720	      /* x-y != -(y-x) with IEEE floating point.  */
3721	      || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3722	      || flag_fast_math))
3723	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3724			   XEXP (XEXP (x, 0), 0));
3725
3726      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
3727      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3728	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3729	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
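
      /* When A is 0 or 1, (xor A 1) equals (minus 1 A), so its negation
	 is (plus A -1): 0 becomes -1 and 1 becomes 0.  */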
3730
3731      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
3732	 if we can then eliminate the NEG (e.g.,
3733	 if the operand is a constant).  */
3734
3735      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3736	{
3737	  temp = simplify_unary_operation (NEG, mode,
3738					   XEXP (XEXP (x, 0), 0), mode);
3739	  if (temp)
3740	    {
3741	      SUBST (XEXP (XEXP (x, 0), 0), temp);
3742	      return XEXP (x, 0);
3743	    }
3744	}
3745
3746      temp = expand_compound_operation (XEXP (x, 0));
3747
3748      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3749 	 replaced by (lshiftrt X C).  This will convert
3750	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
3751
3752      if (GET_CODE (temp) == ASHIFTRT
3753	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
3754	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3755	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3756				     INTVAL (XEXP (temp, 1)));
3757
3758      /* If X has only a single bit that might be nonzero, say, bit I, convert
3759	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3760	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
3761	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
3762	 or a SUBREG of one since we'd be making the expression more
3763	 complex if it was just a register.  */
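      /* For example, in QImode with I == 2, TEMP is either 0 or 4;
	 (ashift TEMP 5) yields 0 or -128, and shifting that back right
	 arithmetically by 5 yields 0 or -4, which is (neg TEMP).  */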
3764
3765      if (GET_CODE (temp) != REG
3766	  && ! (GET_CODE (temp) == SUBREG
3767		&& GET_CODE (SUBREG_REG (temp)) == REG)
3768	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3769	{
3770	  rtx temp1 = simplify_shift_const
3771	    (NULL_RTX, ASHIFTRT, mode,
3772	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3773				   GET_MODE_BITSIZE (mode) - 1 - i),
3774	     GET_MODE_BITSIZE (mode) - 1 - i);
3775
3776	  /* If all we did was surround TEMP with the two shifts, we
3777	     haven't improved anything, so don't use it.  Otherwise,
3778	     we are better off with TEMP1.  */
3779	  if (GET_CODE (temp1) != ASHIFTRT
3780	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3781	      || XEXP (XEXP (temp1, 0), 0) != temp)
3782	    return temp1;
3783	}
3784      break;
3785
3786    case TRUNCATE:
3787      /* We can't handle truncation to a partial integer mode here
3788	 because we don't know the real bitsize of the partial
3789	 integer mode.  */
3790      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3791	break;
3792
3793      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3794	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3795				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3796	SUBST (XEXP (x, 0),
3797	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3798			      GET_MODE_MASK (mode), NULL_RTX, 0));
3799
3800      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
3801      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3802	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3803	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3804	return XEXP (XEXP (x, 0), 0);
3805
3806      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3807	 (OP:SI foo:SI) if OP is NEG or ABS.  */
3808      if ((GET_CODE (XEXP (x, 0)) == ABS
3809	   || GET_CODE (XEXP (x, 0)) == NEG)
3810	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3811	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3812	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3813	return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3814			  XEXP (XEXP (XEXP (x, 0), 0), 0));
3815
3816      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3817	 (truncate:SI x).  */
3818      if (GET_CODE (XEXP (x, 0)) == SUBREG
3819	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3820	  && subreg_lowpart_p (XEXP (x, 0)))
3821	return SUBREG_REG (XEXP (x, 0));
3822
3823      /* If we know that the value is already truncated, we can
3824         replace the TRUNCATE with a SUBREG.  */
3825      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3826	  >= GET_MODE_BITSIZE (mode) + 1)
3827	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3828
3829      /* A truncate of a comparison can be replaced with a subreg if
3830         STORE_FLAG_VALUE permits.  This is like the previous test,
3831         but it works even if the comparison is done in a mode larger
3832         than HOST_BITS_PER_WIDE_INT.  */
3833      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3834	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3835	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3836	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3837
3838      /* Similarly, a truncate of a register whose value is a
3839         comparison can be replaced with a subreg if STORE_FLAG_VALUE
3840         permits.  */
3841      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3842	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3843	  && (temp = get_last_value (XEXP (x, 0)))
3844	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3845	return gen_lowpart_for_combine (mode, XEXP (x, 0));
3846
3847      break;
3848
3849    case FLOAT_TRUNCATE:
3850      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
3851      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3852	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3853 	return XEXP (XEXP (x, 0), 0);
3854
3855      /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3856	 (OP:SF foo:SF) if OP is NEG or ABS.  */
3857      if ((GET_CODE (XEXP (x, 0)) == ABS
3858	   || GET_CODE (XEXP (x, 0)) == NEG)
3859	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3860	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3861	return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3862			  XEXP (XEXP (XEXP (x, 0), 0), 0));
3863
3864      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3865	 is (float_truncate:SF x).  */
3866      if (GET_CODE (XEXP (x, 0)) == SUBREG
3867	  && subreg_lowpart_p (XEXP (x, 0))
3868	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3869	return SUBREG_REG (XEXP (x, 0));
3870      break;
3871
3872#ifdef HAVE_cc0
3873    case COMPARE:
3874	      /* Convert (compare FOO (const_int 0)) to FOO.  If we were not
3875	 using cc0 we would want to leave it as a COMPARE so we could
3876	 distinguish it from a register-register copy.  */
3877      if (XEXP (x, 1) == const0_rtx)
3878	return XEXP (x, 0);
3879
3880      /* In IEEE floating point, x-0 is not the same as x.  */
3881      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3882	   || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3883	   || flag_fast_math)
3884	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3885	return XEXP (x, 0);
3886      break;
3887#endif
3888
3889    case CONST:
3890      /* (const (const X)) can become (const X).  Do it this way rather than
3891	 returning the inner CONST since CONST can be shared with a
3892	 REG_EQUAL note.  */
3893      if (GET_CODE (XEXP (x, 0)) == CONST)
3894	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3895      break;
3896
3897#ifdef HAVE_lo_sum
3898    case LO_SUM:
3899      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
3900	 can add in an offset.  find_split_point will split this address up
3901	 again if it doesn't match.  */
3902      if (GET_CODE (XEXP (x, 0)) == HIGH
3903	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3904	return XEXP (x, 1);
3905      break;
3906#endif
3907
3908    case PLUS:
3909	      /* If we have (plus (plus A const) B), associate it so that CONST is
3910	 outermost.  That's because that's the way indexed addresses are
3911	 supposed to appear.  This code used to check many more cases, but
3912	 they are now checked elsewhere.  */
3913      if (GET_CODE (XEXP (x, 0)) == PLUS
3914	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3915	return gen_binary (PLUS, mode,
3916			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3917				       XEXP (x, 1)),
3918			   XEXP (XEXP (x, 0), 1));
3919
3920      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3921	 when c is (const_int pow2 / 2) is a sign extension of a
3922	 bit-field and can be replaced by either a sign_extend or a
3923	 sign_extract.  The `and' may be a zero_extend.  */
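      /* For instance, (plus (xor (and X 15) 8) -8) interprets the low
	 four bits of X as a signed 4-bit field with value -8 ... 7;
	 the shifts constructed below compute exactly that.  */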
3924      if (GET_CODE (XEXP (x, 0)) == XOR
3925	  && GET_CODE (XEXP (x, 1)) == CONST_INT
3926	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3927	  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3928	  && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3929	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3930	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3931	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3932	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3933		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3934	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3935		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3936		      == i + 1))))
3937	return simplify_shift_const
3938	  (NULL_RTX, ASHIFTRT, mode,
3939	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
3940				 XEXP (XEXP (XEXP (x, 0), 0), 0),
3941				 GET_MODE_BITSIZE (mode) - (i + 1)),
3942	   GET_MODE_BITSIZE (mode) - (i + 1));
3943
3944      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3945	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3946	 is 1.  This produces better code than the alternative immediately
3947	 below.  */
3948      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3949	  && reversible_comparison_p (XEXP (x, 0))
3950	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3951	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3952	return
3953	  gen_unary (NEG, mode, mode,
3954		     gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3955				 mode, XEXP (XEXP (x, 0), 0),
3956				 XEXP (XEXP (x, 0), 1)));
3957
3958      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3959	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3960	 the bitsize of the mode - 1.  This allows simplification of
3961	 "a = (b & 8) == 0;"  */
3962      if (XEXP (x, 1) == constm1_rtx
3963	  && GET_CODE (XEXP (x, 0)) != REG
3964	  && ! (GET_CODE (XEXP (x,0)) == SUBREG
3965		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3966	  && nonzero_bits (XEXP (x, 0), mode) == 1)
3967	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3968	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
3969				 gen_rtx_combine (XOR, mode,
3970						  XEXP (x, 0), const1_rtx),
3971				 GET_MODE_BITSIZE (mode) - 1),
3972	   GET_MODE_BITSIZE (mode) - 1);
3973
3974      /* If we are adding two things that have no bits in common, convert
3975	 the addition into an IOR.  This will often be further simplified,
3976	 for example in cases like ((a & 1) + (a & 2)), which can
3977	 become a & 3.  */
3978
3979      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3980	  && (nonzero_bits (XEXP (x, 0), mode)
3981	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
3982	return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3983      break;
3984
3985    case MINUS:
3986      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3987	 by reversing the comparison code if valid.  */
3988      if (STORE_FLAG_VALUE == 1
3989	  && XEXP (x, 0) == const1_rtx
3990	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3991	  && reversible_comparison_p (XEXP (x, 1)))
3992	return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3993			   mode, XEXP (XEXP (x, 1), 0),
3994				XEXP (XEXP (x, 1), 1));
3995
3996      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3997	 (and <foo> (const_int pow2-1))  */
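      /* For example, (minus X (and X (const_int -8))) leaves just the
	 low three bits of X, i.e. (and X (const_int 7)).  */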
3998      if (GET_CODE (XEXP (x, 1)) == AND
3999	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4000	  && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4001	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4002	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4003				       - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4004
4005      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4006	 integers.  */
4007      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
4008	return gen_binary (MINUS, mode,
4009			   gen_binary (MINUS, mode, XEXP (x, 0),
4010				       XEXP (XEXP (x, 1), 0)),
4011			   XEXP (XEXP (x, 1), 1));
4012      break;
4013
4014    case MULT:
4015      /* If we have (mult (plus A B) C), apply the distributive law and then
4016	 the inverse distributive law to see if things simplify.  This
4017	 occurs mostly in addresses, often when unrolling loops.  */
4018
4019      if (GET_CODE (XEXP (x, 0)) == PLUS)
4020	{
4021	  x = apply_distributive_law
4022	    (gen_binary (PLUS, mode,
4023			 gen_binary (MULT, mode,
4024				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4025			 gen_binary (MULT, mode,
4026				     XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4027
4028	  if (GET_CODE (x) != MULT)
4029	    return x;
4030	}
4031      break;
4032
4033    case UDIV:
4034      /* If this is a divide by a power of two, treat it as a shift if
4035	 its first operand is a shift.  */
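      /* For example, (udiv (lshiftrt X (const_int 1)) (const_int 4))
	 becomes (lshiftrt X (const_int 3)) once the two shifts merge.  */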
4036      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4037	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4038	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
4039	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4040	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4041	      || GET_CODE (XEXP (x, 0)) == ROTATE
4042	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
4043	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4044      break;
4045
4046    case EQ:  case NE:
4047    case GT:  case GTU:  case GE:  case GEU:
4048    case LT:  case LTU:  case LE:  case LEU:
4049      /* If the first operand is a condition code, we can't do anything
4050	 with it.  */
4051      if (GET_CODE (XEXP (x, 0)) == COMPARE
4052	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4053#ifdef HAVE_cc0
4054	      && XEXP (x, 0) != cc0_rtx
4055#endif
4056	       ))
4057	{
4058	  rtx op0 = XEXP (x, 0);
4059	  rtx op1 = XEXP (x, 1);
4060	  enum rtx_code new_code;
4061
4062	  if (GET_CODE (op0) == COMPARE)
4063	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4064
4065	  /* Simplify our comparison, if possible.  */
4066	  new_code = simplify_comparison (code, &op0, &op1);
4067
4068	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4069	     if only the low-order bit is possibly nonzero in X (such as when
4070	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4071	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4072	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
4073	     (plus X 1).
4074
4075	     Remove any ZERO_EXTRACT we made when thinking this was a
4076	     comparison.  It may now be simpler to use, e.g., an AND.  If a
4077	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
4078	     the call to make_compound_operation in the SET case.  */
4079
4080	  if (STORE_FLAG_VALUE == 1
4081	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4082	      && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
4083	    return gen_lowpart_for_combine (mode,
4084					    expand_compound_operation (op0));
4085
4086	  else if (STORE_FLAG_VALUE == 1
4087		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4088		   && op1 == const0_rtx
4089		   && (num_sign_bit_copies (op0, mode)
4090		       == GET_MODE_BITSIZE (mode)))
4091	    {
4092	      op0 = expand_compound_operation (op0);
4093	      return gen_unary (NEG, mode, mode,
4094				gen_lowpart_for_combine (mode, op0));
4095	    }
4096
4097	  else if (STORE_FLAG_VALUE == 1
4098		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4099		   && op1 == const0_rtx
4100		   && nonzero_bits (op0, mode) == 1)
4101	    {
4102	      op0 = expand_compound_operation (op0);
4103	      return gen_binary (XOR, mode,
4104				 gen_lowpart_for_combine (mode, op0),
4105				 const1_rtx);
4106	    }
4107
4108	  else if (STORE_FLAG_VALUE == 1
4109		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4110		   && op1 == const0_rtx
4111		   && (num_sign_bit_copies (op0, mode)
4112		       == GET_MODE_BITSIZE (mode)))
4113	    {
4114	      op0 = expand_compound_operation (op0);
4115	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
4116	    }
4117
4118	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
4119	     those above.  */
4120	  if (STORE_FLAG_VALUE == -1
4121	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4122	      && op1 == const0_rtx
4123	      && (num_sign_bit_copies (op0, mode)
4124		  == GET_MODE_BITSIZE (mode)))
4125	    return gen_lowpart_for_combine (mode,
4126					    expand_compound_operation (op0));
4127
4128	  else if (STORE_FLAG_VALUE == -1
4129		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4130		   && op1 == const0_rtx
4131		   && nonzero_bits (op0, mode) == 1)
4132	    {
4133	      op0 = expand_compound_operation (op0);
4134	      return gen_unary (NEG, mode, mode,
4135				gen_lowpart_for_combine (mode, op0));
4136	    }
4137
4138	  else if (STORE_FLAG_VALUE == -1
4139		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4140		   && op1 == const0_rtx
4141		   && (num_sign_bit_copies (op0, mode)
4142		       == GET_MODE_BITSIZE (mode)))
4143	    {
4144	      op0 = expand_compound_operation (op0);
4145	      return gen_unary (NOT, mode, mode,
4146				gen_lowpart_for_combine (mode, op0));
4147	    }
4148
4149	  /* If X is 0/1, (eq X 0) is X-1.  */
4150	  else if (STORE_FLAG_VALUE == -1
4151		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4152		   && op1 == const0_rtx
4153		   && nonzero_bits (op0, mode) == 1)
4154	    {
4155	      op0 = expand_compound_operation (op0);
4156	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
4157	    }
4158
4159	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4160	     one bit that might be nonzero, we can convert (ne x 0) to
4161	     (ashift x c) where C puts the bit in the sign bit.  Remove any
4162	     AND with STORE_FLAG_VALUE when we are done, since we are only
4163	     going to test the sign bit.  */
4164	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4165	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4166	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4167		  == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4168	      && op1 == const0_rtx
4169	      && mode == GET_MODE (op0)
4170	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4171	    {
4172	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4173					expand_compound_operation (op0),
4174					GET_MODE_BITSIZE (mode) - 1 - i);
4175	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4176		return XEXP (x, 0);
4177	      else
4178		return x;
4179	    }
4180
4181	  /* If the code changed, return a whole new comparison.  */
4182	  if (new_code != code)
4183	    return gen_rtx_combine (new_code, mode, op0, op1);
4184
4185	  /* Otherwise, keep this operation, but maybe change its operands.
4186	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4187	  SUBST (XEXP (x, 0), op0);
4188	  SUBST (XEXP (x, 1), op1);
4189	}
4190      break;
4191
4192    case IF_THEN_ELSE:
4193      return simplify_if_then_else (x);
4194
4195    case ZERO_EXTRACT:
4196    case SIGN_EXTRACT:
4197    case ZERO_EXTEND:
4198    case SIGN_EXTEND:
4199      /* If we are processing SET_DEST, we are done.  */
4200      if (in_dest)
4201	return x;
4202
4203      return expand_compound_operation (x);
4204
4205    case SET:
4206      return simplify_set (x);
4207
4208    case AND:
4209    case IOR:
4210    case XOR:
4211      return simplify_logical (x, last);
4212
4213    case ABS:
4214      /* (abs (neg <foo>)) -> (abs <foo>) */
4215      if (GET_CODE (XEXP (x, 0)) == NEG)
4216	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4217
4218      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4219         do nothing.  */
4220      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4221	break;
4222
4223      /* If operand is something known to be positive, ignore the ABS.  */
4224      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4225	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4226	       <= HOST_BITS_PER_WIDE_INT)
4227	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4228		   & ((HOST_WIDE_INT) 1
4229		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4230		  == 0)))
4231	return XEXP (x, 0);
4232
4233
4234      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
4235      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4236	return gen_rtx_combine (NEG, mode, XEXP (x, 0));
4237
4238      break;
4239
4240    case FFS:
4241      /* (ffs (*_extend <X>)) = (ffs <X>) */
4242      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4243	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4244	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4245      break;
4246
4247    case FLOAT:
4248      /* (float (sign_extend <X>)) = (float <X>).  */
4249      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4250	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4251      break;
4252
4253    case ASHIFT:
4254    case LSHIFTRT:
4255    case ASHIFTRT:
4256    case ROTATE:
4257    case ROTATERT:
4258      /* If this is a shift by a constant amount, simplify it.  */
4259      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4260	return simplify_shift_const (x, code, mode, XEXP (x, 0),
4261				     INTVAL (XEXP (x, 1)));
4262
4263#ifdef SHIFT_COUNT_TRUNCATED
4264      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4265	SUBST (XEXP (x, 1),
4266	       force_to_mode (XEXP (x, 1), GET_MODE (x),
4267			      ((HOST_WIDE_INT) 1
4268			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4269			      - 1,
4270			      NULL_RTX, 0));
4271#endif
4272
4273      break;
4274
4275    default:
4276      break;
4277    }
4278
4279  return x;
4280}
4281
4282/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4283
4284static rtx
4285simplify_if_then_else (x)
4286     rtx x;
4287{
4288  enum machine_mode mode = GET_MODE (x);
4289  rtx cond = XEXP (x, 0);
4290  rtx true = XEXP (x, 1);
4291  rtx false = XEXP (x, 2);
4292  enum rtx_code true_code = GET_CODE (cond);
4293  int comparison_p = GET_RTX_CLASS (true_code) == '<';
4294  rtx temp;
4295  int i;
4296
4297  /* Simplify storing of the truth value.  */
4298  if (comparison_p && true == const_true_rtx && false == const0_rtx)
4299    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4300
4301  /* Also when the truth value has to be reversed.  */
4302  if (comparison_p && reversible_comparison_p (cond)
4303      && true == const0_rtx && false == const_true_rtx)
4304    return gen_binary (reverse_condition (true_code),
4305		       mode, XEXP (cond, 0), XEXP (cond, 1));
4306
4307  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4308     in it is being compared against certain values.  Get the true and false
4309     comparisons and see if that says anything about the value of each arm.  */
4310
4311  if (comparison_p && reversible_comparison_p (cond)
4312      && GET_CODE (XEXP (cond, 0)) == REG)
4313    {
4314      HOST_WIDE_INT nzb;
4315      rtx from = XEXP (cond, 0);
4316      enum rtx_code false_code = reverse_condition (true_code);
4317      rtx true_val = XEXP (cond, 1);
4318      rtx false_val = true_val;
4319      int swapped = 0;
4320
4321      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4322
4323      if (false_code == EQ)
4324	{
4325	  swapped = 1, true_code = EQ, false_code = NE;
4326	  temp = true, true = false, false = temp;
4327	}
4328
4329      /* If we are comparing against zero and the expression being tested has
4330	 only a single bit that might be nonzero, that is its value when it is
4331	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
4332
4333      if (true_code == EQ && true_val == const0_rtx
4334	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4335	false_code = EQ, false_val = GEN_INT (nzb);
4336      else if (true_code == EQ && true_val == const0_rtx
4337	       && (num_sign_bit_copies (from, GET_MODE (from))
4338		   == GET_MODE_BITSIZE (GET_MODE (from))))
4339	false_code = EQ, false_val = constm1_rtx;
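      /* For instance, if we test (eq X (const_int 0)) and nonzero_bits
	 says only bit 2 of X can be set, then X must be (const_int 4)
	 throughout the false arm.  */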
4340
4341      /* Now simplify an arm if we know the value of the register in the
4342	 branch and it is used in the arm.  Be careful due to the potential
4343	 of locally-shared RTL.  */
4344
4345      if (reg_mentioned_p (from, true))
4346	true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4347		      pc_rtx, pc_rtx, 0, 0);
4348      if (reg_mentioned_p (from, false))
4349	false = subst (known_cond (copy_rtx (false), false_code,
4350				   from, false_val),
4351		       pc_rtx, pc_rtx, 0, 0);
4352
4353      SUBST (XEXP (x, 1), swapped ? false : true);
4354      SUBST (XEXP (x, 2), swapped ? true : false);
4355
4356      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4357    }
4358
4359  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4360     reversed, do so to avoid needing two sets of patterns for
4361     subtract-and-branch insns.  Similarly if we have a constant in the true
4362     arm, the false arm is the same as the first operand of the comparison, or
4363     the false arm is more complicated than the true arm.  */
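  /* E.g. (if_then_else (eq A B) (pc) (label_ref L)) becomes
     (if_then_else (ne A B) (label_ref L) (pc)).  */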
4364
4365  if (comparison_p && reversible_comparison_p (cond)
4366      && (true == pc_rtx
4367	  || (CONSTANT_P (true)
4368	      && GET_CODE (false) != CONST_INT && false != pc_rtx)
4369	  || true == const0_rtx
4370	  || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4371	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4372	  || (GET_CODE (true) == SUBREG
4373	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4374	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4375	  || reg_mentioned_p (true, false)
4376	  || rtx_equal_p (false, XEXP (cond, 0))))
4377    {
4378      true_code = reverse_condition (true_code);
4379      SUBST (XEXP (x, 0),
4380	     gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4381			 XEXP (cond, 1)));
4382
4383      SUBST (XEXP (x, 1), false);
4384      SUBST (XEXP (x, 2), true);
4385
4386      temp = true, true = false, false = temp, cond = XEXP (x, 0);
4387
4388      /* It is possible that the conditional has been simplified out.  */
4389      true_code = GET_CODE (cond);
4390      comparison_p = GET_RTX_CLASS (true_code) == '<';
4391    }
4392
4393  /* If the two arms are identical, we don't need the comparison.  */
4394
4395  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4396    return true;
4397
4398  /* Convert a == b ? b : a to "a".  */
4399  if (true_code == EQ && ! side_effects_p (cond)
4400      && rtx_equal_p (XEXP (cond, 0), false)
4401      && rtx_equal_p (XEXP (cond, 1), true))
4402    return false;
4403  else if (true_code == NE && ! side_effects_p (cond)
4404	   && rtx_equal_p (XEXP (cond, 0), true)
4405	   && rtx_equal_p (XEXP (cond, 1), false))
4406    return true;
4407
4408  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4409
4410  if (GET_MODE_CLASS (mode) == MODE_INT
4411      && GET_CODE (false) == NEG
4412      && rtx_equal_p (true, XEXP (false, 0))
4413      && comparison_p
4414      && rtx_equal_p (true, XEXP (cond, 0))
4415      && ! side_effects_p (true))
4416    switch (true_code)
4417      {
4418      case GT:
4419      case GE:
4420	return gen_unary (ABS, mode, mode, true);
4421      case LT:
4422      case LE:
4423	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4424      default:
4425	break;
4426      }
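  /* E.g. (if_then_else (ge X (const_int 0)) X (neg X)) becomes (abs X);
     with LT or LE it becomes (neg (abs X)) instead.  */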
4427
4428  /* Look for MIN or MAX.  */
4429
4430  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4431      && comparison_p
4432      && rtx_equal_p (XEXP (cond, 0), true)
4433      && rtx_equal_p (XEXP (cond, 1), false)
4434      && ! side_effects_p (cond))
4435    switch (true_code)
4436      {
4437      case GE:
4438      case GT:
4439	return gen_binary (SMAX, mode, true, false);
4440      case LE:
4441      case LT:
4442	return gen_binary (SMIN, mode, true, false);
4443      case GEU:
4444      case GTU:
4445	return gen_binary (UMAX, mode, true, false);
4446      case LEU:
4447      case LTU:
4448	return gen_binary (UMIN, mode, true, false);
4449      default:
4450	break;
4451      }
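  /* E.g. (if_then_else (gt A B) A B) becomes (smax A B) and
     (if_then_else (ltu A B) A B) becomes (umin A B).  */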
4452
4453  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4454     second operand is zero, this can be done as (OP Z (mult COND C2)) where
4455     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4456     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4457     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4458	     neither 1 nor -1, but it isn't worth checking for.  */
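  /* E.g. with STORE_FLAG_VALUE == 1, (if_then_else (ne A B)
     (plus Z (const_int 4)) Z) can become (plus Z (mult (ne A B)
     (const_int 4))), since the multiplier is 1 exactly when the PLUS arm
     would have been chosen and 0 otherwise.  */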
4459
4460  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4461      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4462    {
4463      rtx t = make_compound_operation (true, SET);
4464      rtx f = make_compound_operation (false, SET);
4465      rtx cond_op0 = XEXP (cond, 0);
4466      rtx cond_op1 = XEXP (cond, 1);
4467      enum rtx_code op, extend_op = NIL;
4468      enum machine_mode m = mode;
4469      rtx z = 0, c1;
4470
4471      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4472	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4473	   || GET_CODE (t) == ASHIFT
4474	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4475	  && rtx_equal_p (XEXP (t, 0), f))
4476	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4477
4478      /* If an identity-zero op is commutative, check whether there
4479	 would be a match if we swapped the operands.  */
4480      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4481		|| GET_CODE (t) == XOR)
4482	       && rtx_equal_p (XEXP (t, 1), f))
4483	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4484      else if (GET_CODE (t) == SIGN_EXTEND
4485	       && (GET_CODE (XEXP (t, 0)) == PLUS
4486		   || GET_CODE (XEXP (t, 0)) == MINUS
4487		   || GET_CODE (XEXP (t, 0)) == IOR
4488		   || GET_CODE (XEXP (t, 0)) == XOR
4489		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4490		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4491		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4492	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4493	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4494	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4495	       && (num_sign_bit_copies (f, GET_MODE (f))
4496		   > (GET_MODE_BITSIZE (mode)
4497		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4498	{
4499	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4500	  extend_op = SIGN_EXTEND;
4501	  m = GET_MODE (XEXP (t, 0));
4502	}
4503      else if (GET_CODE (t) == SIGN_EXTEND
4504	       && (GET_CODE (XEXP (t, 0)) == PLUS
4505		   || GET_CODE (XEXP (t, 0)) == IOR
4506		   || GET_CODE (XEXP (t, 0)) == XOR)
4507	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4508	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4509	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4510	       && (num_sign_bit_copies (f, GET_MODE (f))
4511		   > (GET_MODE_BITSIZE (mode)
4512		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4513	{
4514	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4515	  extend_op = SIGN_EXTEND;
4516	  m = GET_MODE (XEXP (t, 0));
4517	}
4518      else if (GET_CODE (t) == ZERO_EXTEND
4519	       && (GET_CODE (XEXP (t, 0)) == PLUS
4520		   || GET_CODE (XEXP (t, 0)) == MINUS
4521		   || GET_CODE (XEXP (t, 0)) == IOR
4522		   || GET_CODE (XEXP (t, 0)) == XOR
4523		   || GET_CODE (XEXP (t, 0)) == ASHIFT
4524		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4525		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4526	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4527	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4528	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4529	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4530	       && ((nonzero_bits (f, GET_MODE (f))
4531		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4532		   == 0))
4533	{
4534	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4535	  extend_op = ZERO_EXTEND;
4536	  m = GET_MODE (XEXP (t, 0));
4537	}
4538      else if (GET_CODE (t) == ZERO_EXTEND
4539	       && (GET_CODE (XEXP (t, 0)) == PLUS
4540		   || GET_CODE (XEXP (t, 0)) == IOR
4541		   || GET_CODE (XEXP (t, 0)) == XOR)
4542	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4543	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4544	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4545	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4546	       && ((nonzero_bits (f, GET_MODE (f))
4547		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4548		   == 0))
4549	{
4550	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4551	  extend_op = ZERO_EXTEND;
4552	  m = GET_MODE (XEXP (t, 0));
4553	}
4554
4555      if (z)
4556	{
4557	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4558			pc_rtx, pc_rtx, 0, 0);
4559	  temp = gen_binary (MULT, m, temp,
4560			     gen_binary (MULT, m, c1, const_true_rtx));
4561	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4562	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4563
4564	  if (extend_op != NIL)
4565	    temp = gen_unary (extend_op, mode, m, temp);
4566
4567	  return temp;
4568	}
4569    }
4570
4571  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4572     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4573     negation of a single bit, we can convert this operation to a shift.  We
4574     can actually do this more generally, but it doesn't seem worth it.  */
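  /* E.g. if A is known to be 0 or 1, (if_then_else (ne A (const_int 0))
     (const_int 8) (const_int 0)) becomes (ashift A (const_int 3)).  */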
4575
4576  if (true_code == NE && XEXP (cond, 1) == const0_rtx
4577      && false == const0_rtx && GET_CODE (true) == CONST_INT
4578      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4579	   && (i = exact_log2 (INTVAL (true))) >= 0)
4580	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4581	       == GET_MODE_BITSIZE (mode))
4582	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
4583    return
4584      simplify_shift_const (NULL_RTX, ASHIFT, mode,
4585			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4586
4587  return x;
4588}
4589
4590/* Simplify X, a SET expression.  Return the new expression.  */
4591
4592static rtx
4593simplify_set (x)
4594     rtx x;
4595{
4596  rtx src = SET_SRC (x);
4597  rtx dest = SET_DEST (x);
4598  enum machine_mode mode
4599    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4600  rtx other_insn;
4601  rtx *cc_use;
4602
4603  /* (set (pc) (return)) gets written as (return).  */
4604  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4605    return src;
4606
4607  /* Now that we know for sure which bits of SRC we are using, see if we can
4608     simplify the expression for the object knowing that we only need the
4609     low-order bits.  */
4610
4611  if (GET_MODE_CLASS (mode) == MODE_INT)
4612    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4613
4614  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4615     the comparison result and try to simplify it unless we already have used
4616     undobuf.other_insn.  */
4617  if ((GET_CODE (src) == COMPARE
4618#ifdef HAVE_cc0
4619       || dest == cc0_rtx
4620#endif
4621       )
4622      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4623      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4624      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4625      && rtx_equal_p (XEXP (*cc_use, 0), dest))
4626    {
4627      enum rtx_code old_code = GET_CODE (*cc_use);
4628      enum rtx_code new_code;
4629      rtx op0, op1;
4630      int other_changed = 0;
4631      enum machine_mode compare_mode = GET_MODE (dest);
4632
4633      if (GET_CODE (src) == COMPARE)
4634	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4635      else
4636	op0 = src, op1 = const0_rtx;
4637
4638      /* Simplify our comparison, if possible.  */
4639      new_code = simplify_comparison (old_code, &op0, &op1);
4640
4641#ifdef EXTRA_CC_MODES
4642      /* If this machine has CC modes other than CCmode, check to see if we
4643	 need to use a different CC mode here.  */
4644      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4645#endif /* EXTRA_CC_MODES */
4646
4647#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4648      /* If the mode changed, we have to change SET_DEST, the mode in the
4649	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
4650	 a hard register, just build new versions with the proper mode.  If it
4651	 is a pseudo, we lose unless it is the only time we set the pseudo, in
4652	 which case we can safely change its mode.  */
4653      if (compare_mode != GET_MODE (dest))
4654	{
4655	  int regno = REGNO (dest);
4656	  rtx new_dest = gen_rtx_REG (compare_mode, regno);
4657
4658	  if (regno < FIRST_PSEUDO_REGISTER
4659	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
4660	    {
4661	      if (regno >= FIRST_PSEUDO_REGISTER)
4662		SUBST (regno_reg_rtx[regno], new_dest);
4663
4664	      SUBST (SET_DEST (x), new_dest);
4665	      SUBST (XEXP (*cc_use, 0), new_dest);
4666	      other_changed = 1;
4667
4668	      dest = new_dest;
4669	    }
4670	}
4671#endif
4672
4673      /* If the code changed, we have to build a new comparison in
4674	 undobuf.other_insn.  */
4675      if (new_code != old_code)
4676	{
4677	  unsigned HOST_WIDE_INT mask;
4678
4679	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4680					   dest, const0_rtx));
4681
4682	  /* If the only change we made was to change an EQ into an NE or
4683	     vice versa, OP0 has only one bit that might be nonzero, and OP1
4684	     is zero, check if changing the user of the condition code will
4685	     produce a valid insn.  If it won't, we can keep the original code
4686	     in that insn by surrounding our operation with an XOR.  */
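	  /* E.g. if OP0 has only bit 0 possibly nonzero and the new code
	     is rejected, testing (xor OP0 (const_int 1)) lets the user
	     insn keep its original EQ or NE.  */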
4687
4688	  if (((old_code == NE && new_code == EQ)
4689	       || (old_code == EQ && new_code == NE))
4690	      && ! other_changed && op1 == const0_rtx
4691	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4692	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4693	    {
4694	      rtx pat = PATTERN (other_insn), note = 0;
4695	      int scratches;
4696
4697	      if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
4698		   && ! check_asm_operands (pat)))
4699		{
4700		  PUT_CODE (*cc_use, old_code);
4701		  other_insn = 0;
4702
4703		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4704		}
4705	    }
4706
4707	  other_changed = 1;
4708	}
4709
4710      if (other_changed)
4711	undobuf.other_insn = other_insn;
4712
4713#ifdef HAVE_cc0
4714      /* If we are now comparing against zero, change our source if
4715	 needed.  If we do not use cc0, we always have a COMPARE.  */
4716      if (op1 == const0_rtx && dest == cc0_rtx)
4717	{
4718	  SUBST (SET_SRC (x), op0);
4719	  src = op0;
4720	}
4721      else
4722#endif
4723
4724      /* Otherwise, if we didn't previously have a COMPARE in the
4725	 correct mode, we need one.  */
4726      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4727	{
4728	  SUBST (SET_SRC (x),
4729		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4730	  src = SET_SRC (x);
4731	}
4732      else
4733	{
4734	  /* Otherwise, update the COMPARE if needed.  */
4735	  SUBST (XEXP (src, 0), op0);
4736	  SUBST (XEXP (src, 1), op1);
4737	}
4738    }
4739  else
4740    {
4741      /* Get SET_SRC in a form where we have placed back any
4742	 compound expressions.  Then do the checks below.  */
4743      src = make_compound_operation (src, SET);
4744      SUBST (SET_SRC (x), src);
4745    }
4746
4747  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4748     and X being a REG or (subreg (reg)), we may be able to convert this to
4749     (set (subreg:m2 x) (op)).
4750
4751     We can always do this if M1 is narrower than M2 because that means that
4752     we only care about the low bits of the result.
4753
4754     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4755     perform a narrower operation than requested since the high-order bits will
4756     be undefined.  On machines where it is defined, this transformation is safe
4757     as long as M1 and M2 have the same number of words.  */
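  /* E.g. (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R:QI 0) (plus:SI A B)), performing the addition in
     SImode and storing only the low part.  */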
4758
4759  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4760      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4761      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4762	   / UNITS_PER_WORD)
4763	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4764	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4765#ifndef WORD_REGISTER_OPERATIONS
4766      && (GET_MODE_SIZE (GET_MODE (src))
4767	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4768#endif
4769#ifdef CLASS_CANNOT_CHANGE_SIZE
4770      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4771	    && (TEST_HARD_REG_BIT
4772		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4773		 REGNO (dest)))
4774	    && (GET_MODE_SIZE (GET_MODE (src))
4775		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4776#endif
4777      && (GET_CODE (dest) == REG
4778	  || (GET_CODE (dest) == SUBREG
4779	      && GET_CODE (SUBREG_REG (dest)) == REG)))
4780    {
4781      SUBST (SET_DEST (x),
4782	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4783				      dest));
4784      SUBST (SET_SRC (x), SUBREG_REG (src));
4785
4786      src = SET_SRC (x), dest = SET_DEST (x);
4787    }
4788
4789#ifdef LOAD_EXTEND_OP
4790  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4791     would require a paradoxical subreg.  Replace the subreg with a
4792     zero_extend to avoid the reload that would otherwise be required.  */
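  /* E.g. (set FOO (subreg:SI (mem:HI BAR) 0)) becomes
     (set FOO (zero_extend:SI (mem:HI BAR))) on a machine whose HImode
     loads zero extend.  */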
4793
4794  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4795      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4796      && SUBREG_WORD (src) == 0
4797      && (GET_MODE_SIZE (GET_MODE (src))
4798	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4799      && GET_CODE (SUBREG_REG (src)) == MEM)
4800    {
4801      SUBST (SET_SRC (x),
4802	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4803			      GET_MODE (src), XEXP (src, 0)));
4804
4805      src = SET_SRC (x);
4806    }
4807#endif
4808
4809  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4810     are comparing an item known to be 0 or -1 against 0, use a logical
4811     operation instead. Check for one of the arms being an IOR of the other
4812     arm with some value.  We compute three terms to be IOR'ed together.  In
4813     practice, at most two will be nonzero.  Then we do the IOR's.  */
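  /* E.g. if A is known to be 0 or -1, (if_then_else (ne A (const_int 0))
     B C) becomes (ior (and A B) (and (not A) C)).  */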
4814
4815  if (GET_CODE (dest) != PC
4816      && GET_CODE (src) == IF_THEN_ELSE
4817      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4818      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4819      && XEXP (XEXP (src, 0), 1) == const0_rtx
4820      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4821#ifdef HAVE_conditional_move
4822      && ! can_conditionally_move_p (GET_MODE (src))
4823#endif
4824      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4825			       GET_MODE (XEXP (XEXP (src, 0), 0)))
4826	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4827      && ! side_effects_p (src))
4828    {
4829      rtx true = (GET_CODE (XEXP (src, 0)) == NE
4830		      ? XEXP (src, 1) : XEXP (src, 2));
4831      rtx false = (GET_CODE (XEXP (src, 0)) == NE
4832		   ? XEXP (src, 2) : XEXP (src, 1));
4833      rtx term1 = const0_rtx, term2, term3;
4834
4835      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4836	term1 = false, true = XEXP (true, 1), false = const0_rtx;
4837      else if (GET_CODE (true) == IOR
4838	       && rtx_equal_p (XEXP (true, 1), false))
4839	term1 = false, true = XEXP (true, 0), false = const0_rtx;
4840      else if (GET_CODE (false) == IOR
4841	       && rtx_equal_p (XEXP (false, 0), true))
4842	term1 = true, false = XEXP (false, 1), true = const0_rtx;
4843      else if (GET_CODE (false) == IOR
4844	       && rtx_equal_p (XEXP (false, 1), true))
4845	term1 = true, false = XEXP (false, 0), true = const0_rtx;
4846
4847      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4848      term3 = gen_binary (AND, GET_MODE (src),
4849			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4850				     XEXP (XEXP (src, 0), 0)),
4851			  false);
4852
4853      SUBST (SET_SRC (x),
4854	     gen_binary (IOR, GET_MODE (src),
4855			 gen_binary (IOR, GET_MODE (src), term1, term2),
4856			 term3));
4857
4858      src = SET_SRC (x);
4859    }
4860
4861  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4862     whole thing fail.  */
4863  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4864    return src;
4865  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4866    return dest;
4867  else
4868    /* Convert this into a field assignment operation, if possible.  */
4869    return make_field_assignment (x);
4870}
4871
4872/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4873   result.  LAST is nonzero if this is the last retry.  */
4874
4875static rtx
4876simplify_logical (x, last)
4877     rtx x;
4878     int last;
4879{
4880  enum machine_mode mode = GET_MODE (x);
4881  rtx op0 = XEXP (x, 0);
4882  rtx op1 = XEXP (x, 1);
4883
4884  switch (GET_CODE (x))
4885    {
4886    case AND:
4887      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4888	 insn (and may simplify more).  */
4889      if (GET_CODE (op0) == XOR
4890	  && rtx_equal_p (XEXP (op0, 0), op1)
4891	  && ! side_effects_p (op1))
4892	x = gen_binary (AND, mode,
4893			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4894
4895      if (GET_CODE (op0) == XOR
4896	  && rtx_equal_p (XEXP (op0, 1), op1)
4897	  && ! side_effects_p (op1))
4898	x = gen_binary (AND, mode,
4899			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4900
4901      /* Similarly for (~ (A ^ B)) & A.  */
4902      if (GET_CODE (op0) == NOT
4903	  && GET_CODE (XEXP (op0, 0)) == XOR
4904	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4905	  && ! side_effects_p (op1))
4906	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4907
4908      if (GET_CODE (op0) == NOT
4909	  && GET_CODE (XEXP (op0, 0)) == XOR
4910	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4911	  && ! side_effects_p (op1))
4912	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4913
4914      if (GET_CODE (op1) == CONST_INT)
4915	{
4916	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4917
4918	  /* If we have (ior (and X C1) C2) and the next restart would be
4919	     the last, simplify this by making C1 as small as possible
4920	     and then exit.  */
4921	  if (last
4922	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4923	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
4924	      && GET_CODE (op1) == CONST_INT)
4925	    return gen_binary (IOR, mode,
4926			       gen_binary (AND, mode, XEXP (op0, 0),
4927					   GEN_INT (INTVAL (XEXP (op0, 1))
4928						    & ~ INTVAL (op1))), op1);
4929
4930	  if (GET_CODE (x) != AND)
4931	    return x;
4932
4933	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4934	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
4935	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4936	}
4937
4938      /* Convert (A | B) & A to A.  */
4939      if (GET_CODE (op0) == IOR
4940	  && (rtx_equal_p (XEXP (op0, 0), op1)
4941	      || rtx_equal_p (XEXP (op0, 1), op1))
4942	  && ! side_effects_p (XEXP (op0, 0))
4943	  && ! side_effects_p (XEXP (op0, 1)))
4944	return op1;
4945
4946      /* In the following group of tests (and those in case IOR below),
4947	 we start with some combination of logical operations and apply
4948	 the distributive law followed by the inverse distributive law.
4949	 Most of the time, this results in no change.  However, if some of
4950	 the operands are the same or inverses of each other, simplifications
4951	 will result.
4952
4953	 For example, (and (ior A B) (not B)) can occur as the result of
4954	 expanding a bit field assignment.  When we apply the distributive
4955	 law to this, we get (ior (and A (not B)) (and B (not B))),
4956	 which then simplifies to (and A (not B)).
4957
4958	 If we have (and (ior A B) C), apply the distributive law and then
4959	 the inverse distributive law to see if things simplify.  */
4960
4961      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4962	{
4963	  x = apply_distributive_law
4964	    (gen_binary (GET_CODE (op0), mode,
4965			 gen_binary (AND, mode, XEXP (op0, 0), op1),
4966			 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4967	  if (GET_CODE (x) != AND)
4968	    return x;
4969	}
4970
4971      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4972	return apply_distributive_law
4973	  (gen_binary (GET_CODE (op1), mode,
4974		       gen_binary (AND, mode, XEXP (op1, 0), op0),
4975		       gen_binary (AND, mode, XEXP (op1, 1), op0)));
4976
4977      /* Similarly, taking advantage of the fact that
4978	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
4979
4980      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4981	return apply_distributive_law
4982	  (gen_binary (XOR, mode,
4983		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4984		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4985
4986      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4987	return apply_distributive_law
4988	  (gen_binary (XOR, mode,
4989		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4990		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4991      break;
4992
4993    case IOR:
4994      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
4995      if (GET_CODE (op1) == CONST_INT
4996	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4997	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4998	return op1;
4999
5000      /* Convert (A & B) | A to A.  */
5001      if (GET_CODE (op0) == AND
5002	  && (rtx_equal_p (XEXP (op0, 0), op1)
5003	      || rtx_equal_p (XEXP (op0, 1), op1))
5004	  && ! side_effects_p (XEXP (op0, 0))
5005	  && ! side_effects_p (XEXP (op0, 1)))
5006	return op1;
5007
5008      /* If we have (ior (and A B) C), apply the distributive law and then
5009	 the inverse distributive law to see if things simplify.  */
5010
5011      if (GET_CODE (op0) == AND)
5012	{
5013	  x = apply_distributive_law
5014	    (gen_binary (AND, mode,
5015			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5016			 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
5017
5018	  if (GET_CODE (x) != IOR)
5019	    return x;
5020	}
5021
5022      if (GET_CODE (op1) == AND)
5023	{
5024	  x = apply_distributive_law
5025	    (gen_binary (AND, mode,
5026			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5027			 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
5028
5029	  if (GET_CODE (x) != IOR)
5030	    return x;
5031	}
5032
5033      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5034	 mode size to (rotate A CX).  */
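      /* E.g. in SImode, (ior (ashift A (const_int 24))
	 (lshiftrt A (const_int 8))) becomes (rotate A (const_int 24)).  */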
5035
5036      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5037	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5038	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5039	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5040	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
5041	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5042	      == GET_MODE_BITSIZE (mode)))
5043	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5044			       (GET_CODE (op0) == ASHIFT
5045				? XEXP (op0, 1) : XEXP (op1, 1)));
5046
5047      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5048	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and if the
5049	 PLUS does not affect any of the bits in OP1, this can really be done
5050	 as a PLUS and we can associate.  We do this by seeing if OP1
5051	 can be safely shifted left C bits.  */
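      /* E.g. with C == 16, (ior (ashiftrt (plus X C1) (const_int 16))
	 (const_int 3)) can become (ashiftrt (plus X (C1 | 0x30000))
	 (const_int 16)), provided bits 16 and 17 of the PLUS result are
	 known to be zero.  */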
5052      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5053	  && GET_CODE (XEXP (op0, 0)) == PLUS
5054	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5055	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5056	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5057	{
5058	  int count = INTVAL (XEXP (op0, 1));
5059	  HOST_WIDE_INT mask = INTVAL (op1) << count;
5060
5061	  if (mask >> count == INTVAL (op1)
5062	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5063	    {
5064	      SUBST (XEXP (XEXP (op0, 0), 1),
5065		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5066	      return op0;
5067	    }
5068	}
5069      break;
5070
5071    case XOR:
5072      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5073	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5074	 (NOT y).  */
5075      {
5076	int num_negated = 0;
5077
5078	if (GET_CODE (op0) == NOT)
5079	  num_negated++, op0 = XEXP (op0, 0);
5080	if (GET_CODE (op1) == NOT)
5081	  num_negated++, op1 = XEXP (op1, 0);
5082
5083	if (num_negated == 2)
5084	  {
5085	    SUBST (XEXP (x, 0), op0);
5086	    SUBST (XEXP (x, 1), op1);
5087	  }
5088	else if (num_negated == 1)
5089	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
5090      }
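      /* E.g. (xor (not A) (not B)) becomes (xor A B), and
	 (xor (not A) B) becomes (not (xor A B)).  */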
5091
5092      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
5093	 correspond to a machine insn or result in further simplifications
5094	 if B is a constant.  */
5095
5096      if (GET_CODE (op0) == AND
5097	  && rtx_equal_p (XEXP (op0, 1), op1)
5098	  && ! side_effects_p (op1))
5099	return gen_binary (AND, mode,
5100			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
5101			   op1);
5102
5103      else if (GET_CODE (op0) == AND
5104	       && rtx_equal_p (XEXP (op0, 0), op1)
5105	       && ! side_effects_p (op1))
5106	return gen_binary (AND, mode,
5107			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
5108			   op1);
5109
5110      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
5111	 comparison if STORE_FLAG_VALUE is 1.  */
5112      if (STORE_FLAG_VALUE == 1
5113	  && op1 == const1_rtx
5114	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5115	  && reversible_comparison_p (op0))
5116	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5117				mode, XEXP (op0, 0), XEXP (op0, 1));
5118
5119      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5120	 is (lt foo (const_int 0)), so we can perform the above
5121	 simplification if STORE_FLAG_VALUE is 1.  */
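      /* E.g. in SImode, (xor (lshiftrt X (const_int 31)) (const_int 1))
	 becomes (ge X (const_int 0)).  */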
5122
5123      if (STORE_FLAG_VALUE == 1
5124	  && op1 == const1_rtx
5125	  && GET_CODE (op0) == LSHIFTRT
5126	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
5127	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5128	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
5129
5130      /* (xor (comparison foo bar) (const_int sign-bit))
5131	 when STORE_FLAG_VALUE is the sign bit.  */
5132      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5133	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5134	      == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5135	  && op1 == const_true_rtx
5136	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5137	  && reversible_comparison_p (op0))
5138	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5139				mode, XEXP (op0, 0), XEXP (op0, 1));
5140      break;
5141
5142    default:
5143      abort ();
5144    }
5145
5146  return x;
5147}
5148
5149/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5150   operations" because they can be replaced with two more basic operations.
5151   ZERO_EXTEND is also considered "compound" because it can be replaced with
5152   an AND operation, which is simpler, though only one operation.
5153
5154   The function expand_compound_operation is called with an rtx expression
5155   and will convert it to the appropriate shifts and AND operations,
5156   simplifying at each stage.
5157
5158   The function make_compound_operation is called to convert an expression
5159   consisting of shifts and ANDs into the equivalent compound expression.
5160   It is the inverse of this function, loosely speaking.  */
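/* For example, with little-endian bit numbering on a 32-bit target,
   (sign_extract:SI X (const_int 8) (const_int 8)) expands into
   (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)), while a
   ZERO_EXTEND of a byte ends up as an AND with (const_int 255).  */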
5161
5162static rtx
5163expand_compound_operation (x)
5164     rtx x;
5165{
5166  int pos = 0, len;
5167  int unsignedp = 0;
5168  int modewidth;
5169  rtx tem;
5170
5171  switch (GET_CODE (x))
5172    {
5173    case ZERO_EXTEND:
5174      unsignedp = 1;
5175    case SIGN_EXTEND:
5176      /* We can't necessarily use a const_int for a multiword mode;
5177	 it depends on implicitly extending the value.
5178	 Since we don't know the right way to extend it,
5179	 we can't tell whether the implicit way is right.
5180
5181	 Even for a mode that is no wider than a const_int,
5182	 we can't win, because we need to sign extend one of its bits through
5183	 the rest of it, and we don't know which bit.  */
5184      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5185	return x;
5186
5187      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5188	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
5189	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5190	 reloaded. If not for that, MEM's would very rarely be safe.
5191
5192	 Reject MODEs bigger than a word, because we might not be able
5193	 to reference a two-register group starting with an arbitrary register
5194	 (and currently gen_lowpart might crash for a SUBREG).  */
5195
5196      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5197	return x;
5198
5199      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5200      /* If the inner object has VOIDmode (the only way this can happen
5201	 is if it is an ASM_OPERANDS), we can't do anything since we don't
5202	 know how much masking to do.  */
5203      if (len == 0)
5204	return x;
5205
5206      break;
5207
5208    case ZERO_EXTRACT:
5209      unsignedp = 1;
5210    case SIGN_EXTRACT:
5211      /* If the operand is a CLOBBER, just return it.  */
5212      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5213	return XEXP (x, 0);
5214
5215      if (GET_CODE (XEXP (x, 1)) != CONST_INT
5216	  || GET_CODE (XEXP (x, 2)) != CONST_INT
5217	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
5218	return x;
5219
5220      len = INTVAL (XEXP (x, 1));
5221      pos = INTVAL (XEXP (x, 2));
5222
5223      /* If this goes outside the object being extracted, replace the object
5224	 with a (use (mem ...)) construct that only combine understands
5225	 and is used only for this purpose.  */
5226      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5227	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5228
5229      if (BITS_BIG_ENDIAN)
5230	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5231
5232      break;
5233
5234    default:
5235      return x;
5236    }
5237
5238  /* We can optimize some special cases of ZERO_EXTEND.  */
5239  if (GET_CODE (x) == ZERO_EXTEND)
5240    {
5241      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5242         know that the last value didn't have any inappropriate bits
5243         set.  */
5244      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5245	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5246	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5247	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5248	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5249	return XEXP (XEXP (x, 0), 0);
5250
5251      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5252      if (GET_CODE (XEXP (x, 0)) == SUBREG
5253	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5254	  && subreg_lowpart_p (XEXP (x, 0))
5255	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5256	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5257	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5258	return SUBREG_REG (XEXP (x, 0));
5259
5260      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5261         is a comparison and STORE_FLAG_VALUE permits.  This is like
5262         the first case, but it works even when GET_MODE (x) is larger
5263         than HOST_WIDE_INT.  */
5264      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5265	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5266	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5267	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5268	      <= HOST_BITS_PER_WIDE_INT)
5269 	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5270	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5271	return XEXP (XEXP (x, 0), 0);
5272
5273      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
5274      if (GET_CODE (XEXP (x, 0)) == SUBREG
5275	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5276	  && subreg_lowpart_p (XEXP (x, 0))
5277	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5278	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5279	      <= HOST_BITS_PER_WIDE_INT)
5280	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5281	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5282	return SUBREG_REG (XEXP (x, 0));
5283
5284      /* If sign extension is cheaper than zero extension, then use it
5285	 if we know that no extraneous bits are set, and that the high
5286	 bit is not set.  */
5287      if (flag_expensive_optimizations
5288	  && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5289	       && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5290		    & ~ (((unsigned HOST_WIDE_INT)
5291			  GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5292			 >> 1))
5293		   == 0))
5294	      || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5295		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5296		      <= HOST_BITS_PER_WIDE_INT)
5297		  && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5298		       & ~ (((unsigned HOST_WIDE_INT)
5299			     GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5300			    >> 1))
5301		      == 0))))
5302	{
5303	  rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
5304
5305	  if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5306	    return expand_compound_operation (temp);
5307	}
5308    }
5309
5310  /* If we reach here, we want to return a pair of shifts.  The inner
5311     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
5312     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
5313     logical depending on the value of UNSIGNEDP.
5314
5315     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5316     converted into an AND of a shift.
5317
5318     We must check for the case where the left shift would have a negative
5319     count.  This can happen in a case like (x >> 31) & 255 on machines
5320     that can't shift by a constant.  On those machines, we would first
5321     combine the shift with the AND to produce a variable-position
5322     extraction.  Then the constant of 31 would be substituted in to produce
5323     such a position.  */
5324
5325  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5326  if (modewidth >= pos - len)
5327    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5328				GET_MODE (x),
5329				simplify_shift_const (NULL_RTX, ASHIFT,
5330						      GET_MODE (x),
5331						      XEXP (x, 0),
5332						      modewidth - pos - len),
5333				modewidth - len);
5334
5335  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5336    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5337				  simplify_shift_const (NULL_RTX, LSHIFTRT,
5338							GET_MODE (x),
5339							XEXP (x, 0), pos),
5340				  ((HOST_WIDE_INT) 1 << len) - 1);
5341  else
5342    /* Any other cases we can't handle.  */
5343    return x;
5344
5345
5346  /* If we couldn't do this for some reason, return the original
5347     expression.  */
5348  if (GET_CODE (tem) == CLOBBER)
5349    return x;
5350
5351  return tem;
5352}
5353
5354/* X is a SET which contains an assignment of one object into
5355   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5356   or certain SUBREGS). If possible, convert it into a series of
5357   logical operations.
5358
5359   We half-heartedly support variable positions, but do not at all
5360   support variable lengths.  */
5361
5362static rtx
5363expand_field_assignment (x)
5364     rtx x;
5365{
5366  rtx inner;
5367  rtx pos;			/* Always counts from low bit.  */
5368  int len;
5369  rtx mask;
5370  enum machine_mode compute_mode;
5371
5372  /* Loop until we find something we can't simplify.  */
5373  while (1)
5374    {
5375      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5376	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5377	{
5378	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5379	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
5380	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5381	}
5382      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5383	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5384	{
5385	  inner = XEXP (SET_DEST (x), 0);
5386	  len = INTVAL (XEXP (SET_DEST (x), 1));
5387	  pos = XEXP (SET_DEST (x), 2);
5388
5389	  /* If the position is constant and the field extends past the width
5390	     of INNER, surround INNER with a USE to indicate this.  */
5391	  if (GET_CODE (pos) == CONST_INT
5392	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5393	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5394
5395	  if (BITS_BIG_ENDIAN)
5396	    {
5397	      if (GET_CODE (pos) == CONST_INT)
5398		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5399			       - INTVAL (pos));
5400	      else if (GET_CODE (pos) == MINUS
5401		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
5402		       && (INTVAL (XEXP (pos, 1))
5403			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5404		/* If position is ADJUST - X, new position is X.  */
5405		pos = XEXP (pos, 0);
5406	      else
5407		pos = gen_binary (MINUS, GET_MODE (pos),
5408				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5409					   - len),
5410				  pos);
5411	    }
5412	}
5413
5414      /* A SUBREG between two modes that occupy the same number of words
5415	 can be done by moving the SUBREG to the source.  */
5416      else if (GET_CODE (SET_DEST (x)) == SUBREG
5417	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5418		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5419		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5420			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5421	{
5422	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5423			   gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5424						    SET_SRC (x)));
5425	  continue;
5426	}
5427      else
5428	break;
5429
5430      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5431	inner = SUBREG_REG (inner);
5432
5433      compute_mode = GET_MODE (inner);
5434
5435      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
5436      if (len < HOST_BITS_PER_WIDE_INT)
5437	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5438      else
5439	break;
5440
5441      /* Now compute the equivalent expression.  Make a copy of INNER
5442	 for the SET_DEST in case it is a MEM into which we will substitute;
5443	 we don't want shared RTL in that case.  */
5444      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
5445		       gen_binary (IOR, compute_mode,
5446				   gen_binary (AND, compute_mode,
5447					       gen_unary (NOT, compute_mode,
5448							  compute_mode,
5449							  gen_binary (ASHIFT,
5450								      compute_mode,
5451								      mask, pos)),
5452					       inner),
5453				   gen_binary (ASHIFT, compute_mode,
5454					       gen_binary (AND, compute_mode,
5455							   gen_lowpart_for_combine
5456							   (compute_mode,
5457							    SET_SRC (x)),
5458							   mask),
5459					       pos)));
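      /* In C terms, the expression built above is
	 INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS);  */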
5460    }
5461
5462  return x;
5463}
5464
5465/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
5466   it is an RTX that represents a variable starting position; otherwise,
5467   POS is the (constant) starting bit position (counted from the LSB).
5468
5469   INNER may be a USE.  This will occur when we started with a bitfield
5470   that went outside the boundary of the object in memory, which is
5471   allowed on most machines.  To isolate this case, we produce a USE
5472   whose mode is wide enough and surround the MEM with it.  The only
5473   code that understands the USE is this routine.  If it is not removed,
5474   it will cause the resulting insn not to match.
5475
5476   UNSIGNEDP is non-zero for an unsigned reference and zero for a
5477   signed reference.
5478
5479   IN_DEST is non-zero if this is a reference in the destination of a
5480   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
5481   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5482   be used.
5483
5484   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
5485   ZERO_EXTRACT should be built even for bits starting at bit 0.
5486
5487   MODE is the desired mode of the result (if IN_DEST == 0).
5488
5489   The result is an RTX for the extraction or NULL_RTX if the target
5490   can't handle it.  */
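/* For instance, a request for 8 unsigned bits at constant position 8 of a
   SImode register will typically yield (zero_extract:SI REG (const_int 8)
   (const_int 8)), or a narrower MEM reference when INNER is in memory.  */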
5491
5492static rtx
5493make_extraction (mode, inner, pos, pos_rtx, len,
5494		 unsignedp, in_dest, in_compare)
5495     enum machine_mode mode;
5496     rtx inner;
5497     int pos;
5498     rtx pos_rtx;
5499     int len;
5500     int unsignedp;
5501     int in_dest, in_compare;
5502{
5503  /* This mode describes the size of the storage area
5504     to fetch the overall value from.  Within that, we
5505     ignore the POS lowest bits, etc.  */
5506  enum machine_mode is_mode = GET_MODE (inner);
5507  enum machine_mode inner_mode;
5508  enum machine_mode wanted_inner_mode = byte_mode;
5509  enum machine_mode wanted_inner_reg_mode = word_mode;
5510  enum machine_mode pos_mode = word_mode;
5511  enum machine_mode extraction_mode = word_mode;
5512  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5513  int spans_byte = 0;
5514  rtx new = 0;
5515  rtx orig_pos_rtx = pos_rtx;
5516  int orig_pos;
5517
5518  /* Get some information about INNER and get the innermost object.  */
5519  if (GET_CODE (inner) == USE)
5520    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
5521    /* We don't need to adjust the position because we set up the USE
5522       to pretend that it was a full-word object.  */
5523    spans_byte = 1, inner = XEXP (inner, 0);
5524  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5525    {
5526      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5527	 consider just the QI as the memory to extract from.
5528	 The subreg adds or removes high bits; its mode is
5529	 irrelevant to the meaning of this extraction,
5530	 since POS and LEN count from the lsb.  */
5531      if (GET_CODE (SUBREG_REG (inner)) == MEM)
5532	is_mode = GET_MODE (SUBREG_REG (inner));
5533      inner = SUBREG_REG (inner);
5534    }
5535
5536  inner_mode = GET_MODE (inner);
5537
5538  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5539    pos = INTVAL (pos_rtx), pos_rtx = 0;
5540
5541  /* See if this can be done without an extraction.  We never can if the
5542     width of the field is not the same as that of some integer mode. For
5543     registers, we can only avoid the extraction if the position is at the
5544     low-order bit and this is either not in the destination or we have the
5545     appropriate STRICT_LOW_PART operation available.
5546
5547     For MEM, we can avoid an extract if the field starts on an appropriate
5548     boundary and we can change the mode of the memory reference.  However,
5549     we cannot directly access the MEM if we have a USE and the underlying
5550     MEM is not TMODE.  This combination means that MEM was being used in a
5551     context where bits outside its mode were being referenced; that is only
5552     valid in bit-field insns.  */
5553
5554  if (tmode != BLKmode
5555      && ! (spans_byte && inner_mode != tmode)
5556      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5557	   && GET_CODE (inner) != MEM
5558	   && (! in_dest
5559	       || (GET_CODE (inner) == REG
5560		   && (movstrict_optab->handlers[(int) tmode].insn_code
5561		       != CODE_FOR_nothing))))
5562	  || (GET_CODE (inner) == MEM && pos_rtx == 0
5563	      && (pos
5564		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5565		     : BITS_PER_UNIT)) == 0
5566	      /* We can't do this if we are widening INNER_MODE (it
5567		 may not be aligned, for one thing).  */
5568	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5569	      && (inner_mode == tmode
5570		  || (! mode_dependent_address_p (XEXP (inner, 0))
5571		      && ! MEM_VOLATILE_P (inner))))))
5572    {
5573      /* If INNER is a MEM, make a new MEM that encompasses just the desired
5574	 field.  If the original and current mode are the same, we need not
5575	 adjust the offset.  Otherwise, we do if bytes big endian.
5576	 adjust the offset.  Otherwise, we do so if bytes are big endian.
5577	 If INNER is not a MEM, get a piece consisting of just the field
5578	 of interest (in this case POS % BITS_PER_WORD must be 0).  */
5579
5580      if (GET_CODE (inner) == MEM)
5581	{
5582	  int offset;
5583	  /* POS counts from lsb, but make OFFSET count in memory order.  */
5584	  if (BYTES_BIG_ENDIAN)
5585	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5586	  else
5587	    offset = pos / BITS_PER_UNIT;
5588
5589	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
5590	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5591	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5592	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5593	}
5594      else if (GET_CODE (inner) == REG)
5595	{
5596	  /* We can't call gen_lowpart_for_combine here since we always want
5597	     a SUBREG and it would sometimes return a new hard register.  */
5598	  if (tmode != inner_mode)
5599	    new = gen_rtx_SUBREG (tmode, inner,
5600				  (WORDS_BIG_ENDIAN
5601				   && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5602				   ? (((GET_MODE_SIZE (inner_mode)
5603					- GET_MODE_SIZE (tmode))
5604				       / UNITS_PER_WORD)
5605				      - pos / BITS_PER_WORD)
5606				   : pos / BITS_PER_WORD));
5607	  else
5608	    new = inner;
5609	}
5610      else
5611	new = force_to_mode (inner, tmode,
5612			     len >= HOST_BITS_PER_WIDE_INT
5613			     ? GET_MODE_MASK (tmode)
5614			     : ((HOST_WIDE_INT) 1 << len) - 1,
5615			     NULL_RTX, 0);
5616
5617      /* If this extraction is going into the destination of a SET,
5618	 make a STRICT_LOW_PART unless we made a MEM.  */
5619
5620      if (in_dest)
5621	return (GET_CODE (new) == MEM ? new
5622		: (GET_CODE (new) != SUBREG
5623		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
5624		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5625
5626      /* Otherwise, sign- or zero-extend unless we already are in the
5627	 proper mode.  */
5628
5629      return (mode == tmode ? new
5630	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5631				 mode, new));
5632    }
5633
5634  /* Unless this is a COMPARE or we have a funny memory reference,
5635     don't do anything with zero-extending field extracts starting at
5636     the low-order bit since they are simple AND operations.  */
5637  if (pos_rtx == 0 && pos == 0 && ! in_dest
5638      && ! in_compare && ! spans_byte && unsignedp)
5639    return 0;
5640
5641  /* Unless we are allowed to span bytes, reject this if we would be
5642     spanning bytes or if the position is not a constant and the length
5643     is not 1.  In all other cases, we would only be going outside
5644     our object in cases when an original shift would have been
5645     undefined.  */
5646  if (! spans_byte
5647      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5648	  || (pos_rtx != 0 && len != 1)))
5649    return 0;
5650
5651  /* Get the mode to use should INNER not be a MEM, the mode for the position,
5652     and the mode for the result.  */
5653#ifdef HAVE_insv
5654  if (in_dest)
5655    {
5656      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5657      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5658      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5659    }
5660#endif
5661
5662#ifdef HAVE_extzv
5663  if (! in_dest && unsignedp)
5664    {
5665      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5666      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5667      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5668    }
5669#endif
5670
5671#ifdef HAVE_extv
5672  if (! in_dest && ! unsignedp)
5673    {
5674      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5675      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5676      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5677    }
5678#endif
5679
5680  /* Never narrow an object, since that might not be safe.  */
5681
5682  if (mode != VOIDmode
5683      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5684    extraction_mode = mode;
5685
5686  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5687      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5688    pos_mode = GET_MODE (pos_rtx);
5689
5690  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5691     if we have to change the mode of memory and cannot, the desired mode is
5692     EXTRACTION_MODE.  */
5693  if (GET_CODE (inner) != MEM)
5694    wanted_inner_mode = wanted_inner_reg_mode;
5695  else if (inner_mode != wanted_inner_mode
5696	   && (mode_dependent_address_p (XEXP (inner, 0))
5697	       || MEM_VOLATILE_P (inner)))
5698    wanted_inner_mode = extraction_mode;
5699
5700  orig_pos = pos;
5701
5702  if (BITS_BIG_ENDIAN)
5703    {
5704      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5705	 BITS_BIG_ENDIAN style.  If position is constant, compute new
5706	 position.  Otherwise, build subtraction.
5707	 Note that POS is relative to the mode of the original argument.
5708	 If it's a MEM we need to recompute POS relative to that.
5709	 However, if we're extracting from (or inserting into) a register,
5710	 we want to recompute POS relative to wanted_inner_mode.  */
5711      int width = (GET_CODE (inner) == MEM
5712		   ? GET_MODE_BITSIZE (is_mode)
5713		   : GET_MODE_BITSIZE (wanted_inner_mode));
5714
5715      if (pos_rtx == 0)
5716	pos = width - len - pos;
5717      else
5718	pos_rtx
5719	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5720			     GEN_INT (width - len), pos_rtx);
5721      /* POS may be less than 0 now, but we check for that below.
5722	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
5723    }
5724
5725  /* If INNER has a wider mode, make it smaller.  If this is a constant
5726     extract, try to adjust the byte to point to the byte containing
5727     the value.  */
5728  if (wanted_inner_mode != VOIDmode
5729      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
5730      && ((GET_CODE (inner) == MEM
5731	   && (inner_mode == wanted_inner_mode
5732	       || (! mode_dependent_address_p (XEXP (inner, 0))
5733		   && ! MEM_VOLATILE_P (inner))))))
5734    {
5735      int offset = 0;
5736
5737      /* The computations below will be correct if the machine is big
5738	 endian in both bits and bytes or little endian in bits and bytes.
5739	 If it is mixed, we must adjust.  */
5740
5741      /* If bytes are big endian and we had a paradoxical SUBREG, we must
5742	 adjust OFFSET to compensate.  */
5743      if (BYTES_BIG_ENDIAN
5744	  && ! spans_byte
5745	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5746	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5747
5748      /* If this is a constant position, we can move to the desired byte.  */
5749      if (pos_rtx == 0)
5750	{
5751	  offset += pos / BITS_PER_UNIT;
5752	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
5753	}
5754
5755      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5756	  && ! spans_byte
5757	  && is_mode != wanted_inner_mode)
5758	offset = (GET_MODE_SIZE (is_mode)
5759		  - GET_MODE_SIZE (wanted_inner_mode) - offset);
5760
5761      if (offset != 0 || inner_mode != wanted_inner_mode)
5762	{
5763	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
5764				    plus_constant (XEXP (inner, 0), offset));
5765	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5766	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5767	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5768	  inner = newmem;
5769	}
5770    }
5771
5772  /* If INNER is not memory, we can always get it into the proper mode.  If we
5773     are changing its mode, POS must be a constant and smaller than the size
5774     of the new mode.  */
5775  else if (GET_CODE (inner) != MEM)
5776    {
5777      if (GET_MODE (inner) != wanted_inner_mode
5778	  && (pos_rtx != 0
5779	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5780	return 0;
5781
5782      inner = force_to_mode (inner, wanted_inner_mode,
5783			     pos_rtx
5784			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5785			     ? GET_MODE_MASK (wanted_inner_mode)
5786			     : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5787			     NULL_RTX, 0);
5788    }
5789
5790  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
5791     have to zero extend.  Otherwise, we can just use a SUBREG.  */
5792  if (pos_rtx != 0
5793      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5794    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5795  else if (pos_rtx != 0
5796	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5797    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5798
5799  /* Make POS_RTX unless we already have it and it is correct.  If we don't
5800     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5801     be a CONST_INT.  */
5802  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5803    pos_rtx = orig_pos_rtx;
5804
5805  else if (pos_rtx == 0)
5806    pos_rtx = GEN_INT (pos);
5807
5808  /* Make the required operation.  See if we can use existing rtx.  */
5809  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5810			 extraction_mode, inner, GEN_INT (len), pos_rtx);
5811  if (! in_dest)
5812    new = gen_lowpart_for_combine (mode, new);
5813
5814  return new;
5815}
5816
5817/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5818   with any other operations in X.  Return X without that shift if so.  */
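
/* For instance (illustrative), with COUNT == 3,
   (plus (ashift X (const_int 3)) (const_int 8)) yields
   (plus X (const_int 1)), since shifting that result left by 3 recreates
   the original value.  */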
5819
5820static rtx
5821extract_left_shift (x, count)
5822     rtx x;
5823     int count;
5824{
5825  enum rtx_code code = GET_CODE (x);
5826  enum machine_mode mode = GET_MODE (x);
5827  rtx tem;
5828
5829  switch (code)
5830    {
5831    case ASHIFT:
5832      /* This is the shift itself.  If it is wide enough, we will return
5833	 either the value being shifted if the shift count is equal to
5834	 COUNT or a shift for the difference.  */
5835      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5836	  && INTVAL (XEXP (x, 1)) >= count)
5837	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5838				     INTVAL (XEXP (x, 1)) - count);
5839      break;
5840
5841    case NEG:  case NOT:
5842      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5843	return gen_unary (code, mode, mode, tem);
5844
5845      break;
5846
5847    case PLUS:  case IOR:  case XOR:  case AND:
5848      /* If we can safely shift this constant and we find the inner shift,
5849	 make a new operation.  */
5850      if (GET_CODE (XEXP (x, 1)) == CONST_INT
5851	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5852	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5853	return gen_binary (code, mode, tem,
5854			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5855
5856      break;
5857
5858    default:
5859      break;
5860    }
5861
5862  return 0;
5863}
5864
5865/* Look at the expression rooted at X.  Look for expressions
5866   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5867   Form these expressions.
5868
5869   Return the new rtx, usually just X.
5870
5871   Also, for machines like the Vax that don't have logical shift insns,
5872   try to convert logical to arithmetic shift operations in cases where
5873   they are equivalent.  This undoes the canonicalizations to logical
5874   shifts done elsewhere.
5875
5876   We try, as much as possible, to re-use rtl expressions to save memory.
5877
5878   IN_CODE says what kind of expression we are processing.  Normally, it is
5879   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
5880   being kludges), it is MEM.  When processing the arguments of a comparison
5881   or a COMPARE against zero, it is COMPARE.  */
5882
5883static rtx
5884make_compound_operation (x, in_code)
5885     rtx x;
5886     enum rtx_code in_code;
5887{
5888  enum rtx_code code = GET_CODE (x);
5889  enum machine_mode mode = GET_MODE (x);
5890  int mode_width = GET_MODE_BITSIZE (mode);
5891  rtx rhs, lhs;
5892  enum rtx_code next_code;
5893  int i;
5894  rtx new = 0;
5895  rtx tem;
5896  char *fmt;
5897
5898  /* Select the code to be used in recursive calls.  Once we are inside an
5899     address, we stay there.  If we have a comparison, set to COMPARE,
5900     but once inside, go back to our default of SET.  */
5901
5902  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5903	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5904		  && XEXP (x, 1) == const0_rtx) ? COMPARE
5905	       : in_code == COMPARE ? SET : in_code);
5906
5907  /* Process depending on the code of this operation.  If NEW is set
5908     non-zero, it will be returned.  */
5909
5910  switch (code)
5911    {
5912    case ASHIFT:
5913      /* Convert shifts by constants into multiplications if inside
5914	 an address.  */
5915      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5916	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5917	  && INTVAL (XEXP (x, 1)) >= 0)
5918	{
5919	  new = make_compound_operation (XEXP (x, 0), next_code);
5920	  new = gen_rtx_combine (MULT, mode, new,
5921				 GEN_INT ((HOST_WIDE_INT) 1
5922					  << INTVAL (XEXP (x, 1))));
5923	}
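
      /* E.g., inside an address, (ashift R (const_int 2)) becomes
	 (mult R (const_int 4)), the canonical form for scaled address
	 arithmetic.  */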
5924      break;
5925
5926    case AND:
5927      /* If the second operand is not a constant, we can't do anything
5928	 with it.  */
5929      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5930	break;
5931
5932      /* If the constant is a power of two minus one and the first operand
5933	 is a logical right shift, make an extraction.  */
5934      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5935	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5936	{
5937	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5938	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5939				 0, in_code == COMPARE);
5940	}
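
      /* E.g. (SImode, illustrative): (and (lshiftrt X (const_int 8))
	 (const_int 255)) becomes an 8-bit ZERO_EXTRACT starting at bit 8,
	 since 255 + 1 == 1 << 8.  */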
5941
5942      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
5943      else if (GET_CODE (XEXP (x, 0)) == SUBREG
5944	       && subreg_lowpart_p (XEXP (x, 0))
5945	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5946	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5947	{
5948	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5949					 next_code);
5950	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5951				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5952				 0, in_code == COMPARE);
5953	}
5954      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
5955      else if ((GET_CODE (XEXP (x, 0)) == XOR
5956		|| GET_CODE (XEXP (x, 0)) == IOR)
5957	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5958	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5959	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5960	{
5961	  /* Apply the distributive law, and then try to make extractions.  */
5962	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5963				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
5964					      XEXP (x, 1)),
5965				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
5966					      XEXP (x, 1)));
5967	  new = make_compound_operation (new, in_code);
5968	}
5969
5970      /* If we have (and (rotate X C) M) and C is larger than the number
5971	 of bits in M, this is an extraction.  */
5972
5973      else if (GET_CODE (XEXP (x, 0)) == ROTATE
5974	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5975	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5976	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5977	{
5978	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5979	  new = make_extraction (mode, new,
5980				 (GET_MODE_BITSIZE (mode)
5981				  - INTVAL (XEXP (XEXP (x, 0), 1))),
5982				 NULL_RTX, i, 1, 0, in_code == COMPARE);
5983	}
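
      /* E.g. (SImode, illustrative): (and (rotate X (const_int 28))
	 (const_int 15)) is a 4-bit ZERO_EXTRACT starting at bit
	 32 - 28 == 4 of X.  */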
5984
5985      /* On machines without logical shifts, if the operand of the AND is
5986	 a logical shift and our mask turns off all the propagated sign
5987	 bits, we can replace the logical shift with an arithmetic shift.  */
5988      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5989	       && (lshr_optab->handlers[(int) mode].insn_code
5990		   == CODE_FOR_nothing)
5991	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5992	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5993	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5994	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5995	       && mode_width <= HOST_BITS_PER_WIDE_INT)
5996	{
5997	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5998
5999	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6000	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6001	    SUBST (XEXP (x, 0),
6002		   gen_rtx_combine (ASHIFTRT, mode,
6003				    make_compound_operation (XEXP (XEXP (x, 0), 0),
6004							     next_code),
6005				    XEXP (XEXP (x, 0), 1)));
6006	}
6007
6008      /* If the constant is one less than a power of two, this might be
6009	 representable by an extraction even if no shift is present.
6010	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6011	 we are in a COMPARE.  */
6012      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6013	new = make_extraction (mode,
6014			       make_compound_operation (XEXP (x, 0),
6015							next_code),
6016			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6017
6018      /* If we are in a comparison and this is an AND with a power of two,
6019	 convert this into the appropriate bit extract.  */
6020      else if (in_code == COMPARE
6021	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6022	new = make_extraction (mode,
6023			       make_compound_operation (XEXP (x, 0),
6024							next_code),
6025			       i, NULL_RTX, 1, 1, 0, 1);
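
      /* E.g., inside a COMPARE against zero, (and X (const_int 8))
	 becomes a one-bit ZERO_EXTRACT at bit 3, which many machines can
	 test directly.  */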
6026
6027      break;
6028
6029    case LSHIFTRT:
6030      /* If the sign bit is known to be zero, replace this with an
6031	 arithmetic shift.  */
6032      if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6033	  && lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6034	  && mode_width <= HOST_BITS_PER_WIDE_INT
6035	  && (nonzero_bits (XEXP (x, 0), mode) & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
6036	{
6037	  new = gen_rtx_combine (ASHIFTRT, mode,
6038				 make_compound_operation (XEXP (x, 0),
6039							  next_code),
6040				 XEXP (x, 1));
6041	  break;
6042	}
6043
6044      /* ... fall through ...  */
6045
6046    case ASHIFTRT:
6047      lhs = XEXP (x, 0);
6048      rhs = XEXP (x, 1);
6049
6050      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6051	 this is a SIGN_EXTRACT.  */
6052      if (GET_CODE (rhs) == CONST_INT
6053	  && GET_CODE (lhs) == ASHIFT
6054	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6055	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6056	{
6057	  new = make_compound_operation (XEXP (lhs, 0), next_code);
6058	  new = make_extraction (mode, new,
6059				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6060				 NULL_RTX, mode_width - INTVAL (rhs),
6061				 code == LSHIFTRT, 0, in_code == COMPARE);
6062	}
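
      /* E.g. (SImode, illustrative): (ashiftrt (ashift FOO (const_int 24))
	 (const_int 24)) is a SIGN_EXTRACT of the low 8 bits of FOO.  */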
6063
6064      /* See if we have operations between an ASHIFTRT and an ASHIFT.
6065	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
6066	 also do this for some cases of SIGN_EXTRACT, but it doesn't
6067	 seem worth the effort; the case checked for occurs on Alpha.  */
6068
6069      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6070	  && ! (GET_CODE (lhs) == SUBREG
6071		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6072	  && GET_CODE (rhs) == CONST_INT
6073	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6074	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6075	new = make_extraction (mode, make_compound_operation (new, next_code),
6076			       0, NULL_RTX, mode_width - INTVAL (rhs),
6077			       code == LSHIFTRT, 0, in_code == COMPARE);
6078
6079      break;
6080
6081    case SUBREG:
6082      /* Call ourselves recursively on the inner expression.  If we are
6083	 narrowing the object and it has a different RTL code from
6084	 what it originally did, do this SUBREG as a force_to_mode.  */
6085
6086      tem = make_compound_operation (SUBREG_REG (x), in_code);
6087      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6088	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6089	  && subreg_lowpart_p (x))
6090	{
6091	  rtx newer = force_to_mode (tem, mode,
6092				     GET_MODE_MASK (mode), NULL_RTX, 0);
6093
6094	  /* If we have something other than a SUBREG, we might have
6095	     done an expansion, so rerun ourselves.  */
6096	  if (GET_CODE (newer) != SUBREG)
6097	    newer = make_compound_operation (newer, in_code);
6098
6099	  return newer;
6100	}
6101
6102      /* If this is a paradoxical subreg, and the new code is a sign or
6103	 zero extension, omit the subreg and widen the extension.  If it
6104	 is a regular subreg, we can still get rid of the subreg by not
6105	 widening so much, or in fact removing the extension entirely.  */
6106      if ((GET_CODE (tem) == SIGN_EXTEND
6107	   || GET_CODE (tem) == ZERO_EXTEND)
6108	  && subreg_lowpart_p (x))
6109	{
6110	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6111	      || (GET_MODE_SIZE (mode) >
6112		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6113	    tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6114	  else
6115	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6116	  return tem;
6117	}
6118      break;
6119
6120    default:
6121      break;
6122    }
6123
6124  if (new)
6125    {
6126      x = gen_lowpart_for_combine (mode, new);
6127      code = GET_CODE (x);
6128    }
6129
6130  /* Now recursively process each operand of this operation.  */
6131  fmt = GET_RTX_FORMAT (code);
6132  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6133    if (fmt[i] == 'e')
6134      {
6135	new = make_compound_operation (XEXP (x, i), next_code);
6136	SUBST (XEXP (x, i), new);
6137      }
6138
6139  return x;
6140}
6141
6142/* Given M see if it is a value that would select a field of bits
6143   within an item, but not the entire word.  Return -1 if not.
6144   Otherwise, return the starting position of the field, where 0 is the
6145   low-order bit.
6146
6147   *PLEN is set to the length of the field.  */
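
/* For example (illustrative), M == 0x70 selects bits 4..6: the lowest set
   bit gives POS == 4, and (0x70 >> 4) + 1 == 8 == 1 << 3 gives *PLEN == 3.
   M == 0x50 has a hole in it, so we return -1.  */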
6148
6149static int
6150get_pos_from_mask (m, plen)
6151     unsigned HOST_WIDE_INT m;
6152     int *plen;
6153{
6154  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
6155  int pos = exact_log2 (m & - m);
6156
6157  if (pos < 0)
6158    return -1;
6159
6160  /* Now shift off the low-order zero bits and see if we have a power of
6161     two minus 1.  */
6162  *plen = exact_log2 ((m >> pos) + 1);
6163
6164  if (*plen <= 0)
6165    return -1;
6166
6167  return pos;
6168}
6169
6170/* See if X can be simplified knowing that we will only refer to it in
6171   MODE and will only refer to those bits that are nonzero in MASK.
6172   If other bits are being computed or if masking operations are done
6173   that select a superset of the bits in MASK, they can sometimes be
6174   ignored.
6175
6176   Return a possibly simplified expression, but always convert X to
6177   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
6178
6179   Also, if REG is non-zero and X is a register equal in value to REG,
6180   replace X with REG.
6181
6182   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6183   are all off in X.  This is used when X will be complemented, by either
6184   NOT, NEG, or XOR.  */
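
/* For instance (illustrative), with MASK == 0x0f,
   (and X (const_int 0xf0)) can be forced to (const_int 0), since no bit
   we care about can be nonzero.  */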
6185
6186static rtx
6187force_to_mode (x, mode, mask, reg, just_select)
6188     rtx x;
6189     enum machine_mode mode;
6190     unsigned HOST_WIDE_INT mask;
6191     rtx reg;
6192     int just_select;
6193{
6194  enum rtx_code code = GET_CODE (x);
6195  int next_select = just_select || code == XOR || code == NOT || code == NEG;
6196  enum machine_mode op_mode;
6197  unsigned HOST_WIDE_INT fuller_mask, nonzero;
6198  rtx op0, op1, temp;
6199
6200  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
6201     code below will do the wrong thing since the mode of such an
6202     expression is VOIDmode.
6203
6204     Also do nothing if X is a CLOBBER; this can happen if X was
6205     the return value from a call to gen_lowpart_for_combine.  */
6206  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
6207    return x;
6208
6209  /* We want to perform the operation in its present mode unless we know
6210     that the operation is valid in MODE, in which case we do the operation
6211     in MODE.  */
6212  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6213	      && code_to_optab[(int) code] != 0
6214	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6215		  != CODE_FOR_nothing))
6216	     ? mode : GET_MODE (x));
6217
6218  /* It is not valid to do a right-shift in a narrower mode
6219     than the one it came in with.  */
6220  if ((code == LSHIFTRT || code == ASHIFTRT)
6221      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6222    op_mode = GET_MODE (x);
6223
6224  /* Truncate MASK to fit OP_MODE.  */
6225  if (op_mode)
6226    mask &= GET_MODE_MASK (op_mode);
6227
6228  /* When we have an arithmetic operation, or a shift whose count we
6229     do not know, we need to assume that all bits up to the highest-order
6230     bit in MASK will be needed.  This is how we form such a mask.  */
6231  if (op_mode)
6232    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6233		   ? GET_MODE_MASK (op_mode)
6234		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6235  else
6236    fuller_mask = ~ (HOST_WIDE_INT) 0;
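
  /* E.g., a MASK of 0x0c (illustrative) yields a FULLER_MASK of 0x0f,
     since a carry out of bit 0 or bit 1 could propagate into the bits we
     care about.  */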
6237
6238  /* Determine what bits of X are guaranteed to be (non)zero.  */
6239  nonzero = nonzero_bits (x, mode);
6240
6241  /* If none of the bits in X are needed, return a zero.  */
6242  if (! just_select && (nonzero & mask) == 0)
6243    return const0_rtx;
6244
6245  /* If X is a CONST_INT, return a new one.  Do this here since the
6246     test below will fail.  */
6247  if (GET_CODE (x) == CONST_INT)
6248    {
6249      HOST_WIDE_INT cval = INTVAL (x) & mask;
6250      int width = GET_MODE_BITSIZE (mode);
6251
6252      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6253	 number, sign extend it.  */
6254      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6255	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6256	cval |= (HOST_WIDE_INT) -1 << width;
6257
6258      return GEN_INT (cval);
6259    }
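
  /* E.g., for QImode (illustrative), a masked value of 0xff has bit 7 set
     and becomes the host value -1, so the CONST_INT reads the same on
     32-bit and 64-bit hosts.  */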
6260
6261  /* If X is narrower than MODE and we want all the bits in X's mode, just
6262     get X in the proper mode.  */
6263  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6264      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
6265    return gen_lowpart_for_combine (mode, x);
6266
6267  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6268     MASK are already known to be zero in X, we need not do anything.  */
6269  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6270    return x;
6271
6272  switch (code)
6273    {
6274    case CLOBBER:
6275      /* If X is a (clobber (const_int)), return it since we know we are
6276	 generating something that won't match.  */
6277      return x;
6278
6279    case USE:
6280      /* X is a (use (mem ..)) that was made from a bit-field extraction that
6281	 spanned the boundary of the MEM.  If we are now masking so it is
6282	 within that boundary, we don't need the USE any more.  */
6283      if (! BITS_BIG_ENDIAN
6284	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6285	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6286      break;
6287
6288    case SIGN_EXTEND:
6289    case ZERO_EXTEND:
6290    case ZERO_EXTRACT:
6291    case SIGN_EXTRACT:
6292      x = expand_compound_operation (x);
6293      if (GET_CODE (x) != code)
6294	return force_to_mode (x, mode, mask, reg, next_select);
6295      break;
6296
6297    case REG:
6298      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6299		       || rtx_equal_p (reg, get_last_value (x))))
6300	x = reg;
6301      break;
6302
6303    case SUBREG:
6304      if (subreg_lowpart_p (x)
6305	  /* We can ignore the effect of this SUBREG if it narrows the mode or
6306	     if the constant masks to zero all the bits the mode doesn't
6307	     have.  */
6308	  && ((GET_MODE_SIZE (GET_MODE (x))
6309	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6310	      || (0 == (mask
6311			& GET_MODE_MASK (GET_MODE (x))
6312			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6313	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6314      break;
6315
6316    case AND:
6317      /* If this is an AND with a constant, convert it into an AND
6318	 whose constant is the AND of that constant with MASK.  If it
6319	 remains an AND of MASK, delete it since it is redundant.  */
6320
6321      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6322	{
6323	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6324				      mask & INTVAL (XEXP (x, 1)));
6325
6326	  /* If X is still an AND, see if it is an AND with a mask that
6327	     is just some low-order bits.  If so, and it is MASK, we don't
6328	     need it.  */
6329
6330	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6331	      && INTVAL (XEXP (x, 1)) == mask)
6332	    x = XEXP (x, 0);
6333
6334	  /* If it remains an AND, try making another AND with the bits
6335	     in the mode mask that aren't in MASK turned on.  If the
6336	     constant in the AND is wide enough, this might make a
6337	     cheaper constant.  */
6338
6339	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6340	      && GET_MODE_MASK (GET_MODE (x)) != mask
6341	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6342	    {
6343	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6344				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6345	      int width = GET_MODE_BITSIZE (GET_MODE (x));
6346	      rtx y;
6347
6348	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6349		 number, sign extend it.  */
6350	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6351		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6352		cval |= (HOST_WIDE_INT) -1 << width;
6353
6354	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6355	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
6356		x = y;
6357	    }
6358
6359	  break;
6360	}
6361
6362      goto binop;
6363
6364    case PLUS:
6365      /* In (and (plus FOO C1) M), if M is a mask that just turns off
6366	 low-order bits (as in an alignment operation) and FOO is already
6367	 aligned to that boundary, mask C1 to that boundary as well.
6368	 This may eliminate that PLUS and, later, the AND.  */
6369
6370      {
6371	int width = GET_MODE_BITSIZE (mode);
6372	unsigned HOST_WIDE_INT smask = mask;
6373
6374	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6375	   number, sign extend it.  */
6376
6377	if (width < HOST_BITS_PER_WIDE_INT
6378	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6379	  smask |= (HOST_WIDE_INT) -1 << width;
6380
6381	if (GET_CODE (XEXP (x, 1)) == CONST_INT
6382	    && exact_log2 (- smask) >= 0)
6383	  {
6384#ifdef STACK_BIAS
6385	    if (STACK_BIAS
6386	        && (XEXP (x, 0) == stack_pointer_rtx
6387	            || XEXP (x, 0) == frame_pointer_rtx))
6388	      {
6389                int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6390                unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6391
6392		sp_mask &= ~ (sp_alignment - 1);
6393		if ((sp_mask & ~ mask) == 0
6394		    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ mask) != 0)
6395		  return force_to_mode (plus_constant (XEXP (x, 0),
6396		  				       ((INTVAL (XEXP (x, 1)) -
6397							 STACK_BIAS) & mask)
6398						       + STACK_BIAS),
6399		 			mode, mask, reg, next_select);
6400              }
6401#endif
6402	    if ((nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
6403	        && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
6404	      return force_to_mode (plus_constant (XEXP (x, 0),
6405					           INTVAL (XEXP (x, 1)) & mask),
6406				    mode, mask, reg, next_select);
6407	  }
6408      }
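
      /* E.g., with MASK == 0xfffffffc (illustrative): if FOO is known to
	 be 4-byte aligned, (plus FOO (const_int 5)) can be masked to
	 (plus FOO (const_int 4)), often letting the outer AND be
	 deleted.  */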
6409
6410      /* ... fall through ...  */
6411
6412    case MINUS:
6413    case MULT:
6414      /* For PLUS, MINUS and MULT, we need any bits less significant than the
6415	 most significant bit in MASK since carries from those bits will
6416	 affect the bits we are interested in.  */
6417      mask = fuller_mask;
6418      goto binop;
6419
6420    case IOR:
6421    case XOR:
6422      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6423	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6424	 operation which may be a bitfield extraction.  Ensure that the
6425	 constant we form is not wider than the mode of X.  */
6426
6427      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6428	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6429	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6430	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6431	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6432	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
6433	       + floor_log2 (INTVAL (XEXP (x, 1))))
6434	      < GET_MODE_BITSIZE (GET_MODE (x)))
6435	  && (INTVAL (XEXP (x, 1))
6436	      & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6437	{
6438	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6439			      << INTVAL (XEXP (XEXP (x, 0), 1)));
6440	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
6441			     XEXP (XEXP (x, 0), 0), temp);
6442	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6443			  XEXP (XEXP (x, 0), 1));
6444	  return force_to_mode (x, mode, mask, reg, next_select);
6445	}
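
      /* E.g. (illustrative), (ior (lshiftrt FOO (const_int 4))
	 (const_int 3)) can become (lshiftrt (ior FOO (const_int 0x30))
	 (const_int 4)), exposing a possible bit-field extraction.  */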
6446
6447    binop:
6448      /* For most binary operations, just propagate into the operation and
6449	 change the mode if we have an operation of that mode.   */
6450
6451      op0 = gen_lowpart_for_combine (op_mode,
6452				     force_to_mode (XEXP (x, 0), mode, mask,
6453						    reg, next_select));
6454      op1 = gen_lowpart_for_combine (op_mode,
6455				     force_to_mode (XEXP (x, 1), mode, mask,
6456						    reg, next_select));
6457
6458      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6459	 MASK since OP1 might have been sign-extended but we never want
6460	 to turn on extra bits, since combine might have previously relied
6461	 on them being off.  */
6462      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6463	  && (INTVAL (op1) & mask) != 0)
6464	op1 = GEN_INT (INTVAL (op1) & mask);
6465
6466      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6467	x = gen_binary (code, op_mode, op0, op1);
6468      break;
6469
6470    case ASHIFT:
6471      /* For left shifts, do the same, but just for the first operand.
6472	 However, we cannot do anything with shifts where we cannot
6473	 guarantee that the counts are smaller than the size of the mode
6474	 because such a count will have a different meaning in a
6475	 wider mode.  */
6476
6477      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6478	     && INTVAL (XEXP (x, 1)) >= 0
6479	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6480	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6481		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
6482		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
6483	break;
6484
6485      /* If the shift count is a constant and we can do arithmetic in
6486	 the mode of the shift, refine which bits we need.  Otherwise, use the
6487	 conservative form of the mask.  */
6488      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6489	  && INTVAL (XEXP (x, 1)) >= 0
6490	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6491	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6492	mask >>= INTVAL (XEXP (x, 1));
6493      else
6494	mask = fuller_mask;
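
      /* E.g., if only bits 8..15 of (ashift FOO (const_int 8)) are wanted
	 (MASK == 0xff00, illustrative), only bits 0..7 of FOO matter, so
	 MASK becomes 0xff for the recursive call.  */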
6495
6496      op0 = gen_lowpart_for_combine (op_mode,
6497				     force_to_mode (XEXP (x, 0), op_mode,
6498						    mask, reg, next_select));
6499
6500      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6501	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
6502      break;
6503
6504    case LSHIFTRT:
6505      /* Here we can only do something if the shift count is a constant,
6506	 this shift constant is valid for the host, and we can do arithmetic
6507	 in OP_MODE.  */
6508
6509      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6510	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6511	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6512	{
6513	  rtx inner = XEXP (x, 0);
6514
6515	  /* Select the mask of the bits we need for the shift operand.  */
6516	  mask <<= INTVAL (XEXP (x, 1));
6517
6518	  /* We can only change the mode of the shift if we can do arithmetic
6519	     in the mode of the shift and MASK is no wider than the width of
6520	     OP_MODE.  */
6521	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6522	      || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
6523	    op_mode = GET_MODE (x);
6524
6525	  inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6526
6527	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6528	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
6529	}
6530
6531      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6532	 shift and AND produces only copies of the sign bit (C2 is one less
6533	 than a power of two), we can do this with just a shift.  */
6534
6535      if (GET_CODE (x) == LSHIFTRT
6536	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6537	  && ((INTVAL (XEXP (x, 1))
6538	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6539	      >= GET_MODE_BITSIZE (GET_MODE (x)))
6540	  && exact_log2 (mask + 1) >= 0
6541	  && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6542	      >= exact_log2 (mask + 1)))
6543	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6544			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6545				 - exact_log2 (mask + 1)));
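
      /* E.g. (SImode, illustrative): with MASK == 1, if the operand of
	 (lshiftrt FOO (const_int 28)) has 24 sign-bit copies, the result
	 simplifies to (lshiftrt FOO (const_int 31)), isolating the sign
	 bit with a single shift.  */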
6546      break;
6547
6548    case ASHIFTRT:
6549      /* If we are just looking for the sign bit, we don't need this shift at
6550	 all, even if it has a variable count.  */
6551      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6552	  && (mask == ((HOST_WIDE_INT) 1
6553		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6554	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6555
6556      /* If this is a shift by a constant, get a mask that contains those bits
6557	 that are not copies of the sign bit.  We then have two cases:  If
6558	 MASK only includes those bits, this can be a logical shift, which may
6559	 allow simplifications.  If MASK is a single-bit field not within
6560	 those bits, we are requesting a copy of the sign bit and hence can
6561	 shift the sign bit to the appropriate location.  */
6562
6563      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6564	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6565	{
6566	  int i = -1;
6567
6568	  /* If the considered data is wider than HOST_WIDE_INT, we can't
6569	     represent a mask for all its bits in a single scalar.
6570	     But we only care about the lower bits, so calculate these.  */
6571
6572	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6573	    {
6574	      nonzero = ~ (HOST_WIDE_INT) 0;
6575
6576	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6577		 is the number of bits a full-width mask would have set.
6578		 We need only shift if these are fewer than nonzero can
6579		 hold.  If not, we must keep all bits set in nonzero.  */
6580
6581	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6582		  < HOST_BITS_PER_WIDE_INT)
6583		nonzero >>= INTVAL (XEXP (x, 1))
6584			    + HOST_BITS_PER_WIDE_INT
6585			    - GET_MODE_BITSIZE (GET_MODE (x));
6586	    }
6587	  else
6588	    {
6589	      nonzero = GET_MODE_MASK (GET_MODE (x));
6590	      nonzero >>= INTVAL (XEXP (x, 1));
6591	    }
6592
6593	  if ((mask & ~ nonzero) == 0
6594	      || (i = exact_log2 (mask)) >= 0)
6595	    {
6596	      x = simplify_shift_const
6597		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6598		 i < 0 ? INTVAL (XEXP (x, 1))
6599		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6600
6601	      if (GET_CODE (x) != ASHIFTRT)
6602		return force_to_mode (x, mode, mask, reg, next_select);
6603	    }
6604	}
6605
6606      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
6607	 even if the shift count isn't a constant.  */
6608      if (mask == 1)
6609	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6610
6611      /* If this is a sign-extension operation that just affects bits
6612	 we don't care about, remove it.  Be sure the call above returned
6613	 something that is still a shift.  */
6614
6615      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6616	  && GET_CODE (XEXP (x, 1)) == CONST_INT
6617	  && INTVAL (XEXP (x, 1)) >= 0
6618	  && (INTVAL (XEXP (x, 1))
6619	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6620	  && GET_CODE (XEXP (x, 0)) == ASHIFT
6621	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6622	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6623	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6624			      reg, next_select);
6625
6626      break;
6627
6628    case ROTATE:
6629    case ROTATERT:
6630      /* If the shift count is constant and we can do computations
6631	 in the mode of X, compute where the bits we care about are.
6632	 Otherwise, we can't do anything.  Don't change the mode of
6633	 the shift or propagate MODE into the shift, though.  */
6634      if (GET_CODE (XEXP (x, 1)) == CONST_INT
6635	  && INTVAL (XEXP (x, 1)) >= 0)
6636	{
6637	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6638					    GET_MODE (x), GEN_INT (mask),
6639					    XEXP (x, 1));
6640	  if (temp && GET_CODE (temp) == CONST_INT)
6641	    SUBST (XEXP (x, 0),
6642		   force_to_mode (XEXP (x, 0), GET_MODE (x),
6643				  INTVAL (temp), reg, next_select));
6644	}
6645      break;
6646
6647    case NEG:
6648      /* If we just want the low-order bit, the NEG isn't needed since it
6649	 won't change the low-order bit.    */
6650      if (mask == 1)
6651	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6652
6653      /* We need any bits less significant than the most significant bit in
6654	 MASK since carries from those bits will affect the bits we are
6655	 interested in.  */
6656      mask = fuller_mask;
6657      goto unop;
6658
6659    case NOT:
6660      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6661	 same as the XOR case above.  Ensure that the constant we form is not
6662	 wider than the mode of X.  */
6663
6664      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6665	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6666	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6667	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6668	      < GET_MODE_BITSIZE (GET_MODE (x)))
6669	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6670	{
6671	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6672	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6673	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6674
6675	  return force_to_mode (x, mode, mask, reg, next_select);
6676	}
6677
6678      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6679	 use the full mask inside the NOT.  */
6680      mask = fuller_mask;
6681
6682    unop:
6683      op0 = gen_lowpart_for_combine (op_mode,
6684				     force_to_mode (XEXP (x, 0), mode, mask,
6685						    reg, next_select));
6686      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6687	x = gen_unary (code, op_mode, op_mode, op0);
6688      break;
6689
6690    case NE:
6691      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6692	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
6693	 which is equal to STORE_FLAG_VALUE.  */
6694      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6695	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
6696	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
6697	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6698
6699      break;
6700
6701    case IF_THEN_ELSE:
6702      /* We have no way of knowing if the IF_THEN_ELSE can itself be
6703	 written in a narrower mode.  We play it safe and do not do so.  */
6704
6705      SUBST (XEXP (x, 1),
6706	     gen_lowpart_for_combine (GET_MODE (x),
6707				      force_to_mode (XEXP (x, 1), mode,
6708						     mask, reg, next_select)));
6709      SUBST (XEXP (x, 2),
6710	     gen_lowpart_for_combine (GET_MODE (x),
6711				      force_to_mode (XEXP (x, 2), mode,
6712						     mask, reg, next_select)));
6713      break;
6714
6715    default:
6716      break;
6717    }
6718
6719  /* Ensure we return a value of the proper mode.  */
6720  return gen_lowpart_for_combine (mode, x);
6721}
6722
6723/* Return nonzero if X is an expression that has one of two values depending on
6724   whether some other value is zero or nonzero.  In that case, we return the
6725   value that is being tested, *PTRUE is set to the value if the rtx being
6726   returned has a nonzero value, and *PFALSE is set to the other alternative.
6727
6728   If we return zero, we set *PTRUE and *PFALSE to X.  */
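
/* E.g., for (if_then_else (ne A (const_int 0)) B C) we return A itself,
   with *PTRUE == B and *PFALSE == C.  */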
6729
6730static rtx
6731if_then_else_cond (x, ptrue, pfalse)
6732     rtx x;
6733     rtx *ptrue, *pfalse;
6734{
6735  enum machine_mode mode = GET_MODE (x);
6736  enum rtx_code code = GET_CODE (x);
6737  int size = GET_MODE_BITSIZE (mode);
6738  rtx cond0, cond1, true0, true1, false0, false1;
6739  unsigned HOST_WIDE_INT nz;
6740
6741  /* If this is a unary operation whose operand has one of two values, apply
6742     our opcode to compute those values.  */
6743  if (GET_RTX_CLASS (code) == '1'
6744      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6745    {
6746      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6747      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6748      return cond0;
6749    }
6750
6751  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6752     make can't possibly match and would suppress other optimizations.  */
6753  else if (code == COMPARE)
6754    ;
6755
6756  /* If this is a binary operation, see if either side has only one of two
6757     values.  If either one does or if both do and they are conditional on
6758     the same value, compute the new true and false values.  */
6759  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6760	   || GET_RTX_CLASS (code) == '<')
6761    {
6762      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6763      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6764
6765      if ((cond0 != 0 || cond1 != 0)
6766	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6767	{
6768	  /* If if_then_else_cond returned zero, then true/false are the
6769	     same rtl.  We must copy one of them to prevent invalid rtl
6770	     sharing.  */
6771	  if (cond0 == 0)
6772	    true0 = copy_rtx (true0);
6773	  else if (cond1 == 0)
6774	    true1 = copy_rtx (true1);
6775
6776	  *ptrue = gen_binary (code, mode, true0, true1);
6777	  *pfalse = gen_binary (code, mode, false0, false1);
6778	  return cond0 ? cond0 : cond1;
6779	}
6780
6781      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6782	 operands is zero when the other is non-zero, and vice-versa,
6783	 and STORE_FLAG_VALUE is 1 or -1.  */
6784
6785      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6786	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
6787	   || code == UMAX)
6788	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6789	{
6790	  rtx op0 = XEXP (XEXP (x, 0), 1);
6791	  rtx op1 = XEXP (XEXP (x, 1), 1);
6792
6793	  cond0 = XEXP (XEXP (x, 0), 0);
6794	  cond1 = XEXP (XEXP (x, 1), 0);
6795
6796	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6797	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6798	      && reversible_comparison_p (cond1)
6799	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6800		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6801		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6802		  || ((swap_condition (GET_CODE (cond0))
6803		       == reverse_condition (GET_CODE (cond1)))
6804		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6805		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6806	      && ! side_effects_p (x))
6807	    {
6808	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6809	      *pfalse = gen_binary (MULT, mode,
6810				    (code == MINUS
6811				     ? gen_unary (NEG, mode, mode, op1) : op1),
6812				    const_true_rtx);
6813	      return cond0;
6814	    }
6815	}
6816
6817      /* Similarly for MULT, AND and UMIN, except that for these the result
6818	 is always zero.  */
6819      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6820	  && (code == MULT || code == AND || code == UMIN)
6821	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6822	{
6823	  cond0 = XEXP (XEXP (x, 0), 0);
6824	  cond1 = XEXP (XEXP (x, 1), 0);
6825
6826	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6827	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6828	      && reversible_comparison_p (cond1)
6829	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6830		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6831		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6832		  || ((swap_condition (GET_CODE (cond0))
6833		       == reverse_condition (GET_CODE (cond1)))
6834		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6835		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6836	      && ! side_effects_p (x))
6837	    {
6838	      *ptrue = *pfalse = const0_rtx;
6839	      return cond0;
6840	    }
6841	}
6842    }
6843
6844  else if (code == IF_THEN_ELSE)
6845    {
6846      /* If we have IF_THEN_ELSE already, extract the condition and
6847	 canonicalize it if it is NE or EQ.  */
6848      cond0 = XEXP (x, 0);
6849      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6850      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6851	return XEXP (cond0, 0);
6852      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6853	{
6854	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6855	  return XEXP (cond0, 0);
6856	}
6857      else
6858	return cond0;
6859    }
6860
6861  /* If X is a normal SUBREG with both inner and outer modes integral,
6862     we can narrow both the true and false values of the inner expression,
6863     if there is a condition.  */
6864  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6865	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6866	   && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6867	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6868					       &true0, &false0)))
6869    {
6870      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6871      *pfalse
6872	= force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6873
6874      return cond0;
6875    }
6876
6877  /* If X is a constant, this isn't special and will cause confusions
6878     if we treat it as such.  Likewise if it is equivalent to a constant.  */
6879  else if (CONSTANT_P (x)
6880	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6881    ;
6882
6883  /* If X is known to be either 0 or -1, those are the true and
6884     false values when testing X.  */
6885  else if (num_sign_bit_copies (x, mode) == size)
6886    {
6887      *ptrue = constm1_rtx, *pfalse = const0_rtx;
6888      return x;
6889    }
6890
6891  /* Likewise for 0 or a single bit.  */
6892  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6893    {
6894      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6895      return x;
6896    }
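
  /* E.g., if nonzero_bits shows that only bit 3 of X can be set, X is
     either 8 or 0, so *PTRUE == 8 and *PFALSE == 0.  */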
6897
6898  /* Otherwise fail; show no condition with true and false values the same.  */
6899  *ptrue = *pfalse = x;
6900  return 0;
6901}
6902
6903/* Return the value of expression X given the fact that condition COND
6904   is known to be true when applied to REG as its first operand and VAL
6905   as its second.  X is known to not be shared and so can be modified in
6906   place.
6907
6908   We only handle the simplest cases, and specifically those cases that
6909   arise with IF_THEN_ELSE expressions.  */
6910
6911static rtx
6912known_cond (x, cond, reg, val)
6913     rtx x;
6914     enum rtx_code cond;
6915     rtx reg, val;
6916{
6917  enum rtx_code code = GET_CODE (x);
6918  rtx temp;
6919  char *fmt;
6920  int i, j;
6921
6922  if (side_effects_p (x))
6923    return x;
6924
6925  if (cond == EQ && rtx_equal_p (x, reg))
6926    return val;
6927
6928  /* If X is (abs REG) and we know something about REG's relationship
6929     with zero, we may be able to simplify this.  */
6930
6931  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6932    switch (cond)
6933      {
6934      case GE:  case GT:  case EQ:
6935	return XEXP (x, 0);
6936      case LT:  case LE:
6937	return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6938			  XEXP (x, 0));
6939      default:
6940	break;
6941      }
6942
6943  /* The only other cases we handle are MIN, MAX, and comparisons if the
6944     operands are the same as REG and VAL.  */
6945
6946  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6947    {
6948      if (rtx_equal_p (XEXP (x, 0), val))
6949	cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6950
6951      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6952	{
6953	  if (GET_RTX_CLASS (code) == '<')
6954	    return (comparison_dominates_p (cond, code) ? const_true_rtx
6955		    : (comparison_dominates_p (cond,
6956					       reverse_condition (code))
6957		       ? const0_rtx : x));
6958
6959	  else if (code == SMAX || code == SMIN
6960		   || code == UMIN || code == UMAX)
6961	    {
6962	      int unsignedp = (code == UMIN || code == UMAX);
6963
6964	      if (code == SMAX || code == UMAX)
6965		cond = reverse_condition (cond);
6966
6967	      switch (cond)
6968		{
6969		case GE:   case GT:
6970		  return unsignedp ? x : XEXP (x, 1);
6971		case LE:   case LT:
6972		  return unsignedp ? x : XEXP (x, 0);
6973		case GEU:  case GTU:
6974		  return unsignedp ? XEXP (x, 1) : x;
6975		case LEU:  case LTU:
6976		  return unsignedp ? XEXP (x, 0) : x;
6977		default:
6978		  break;
6979		}
6980	    }
6981	}
6982    }
6983
6984  fmt = GET_RTX_FORMAT (code);
6985  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6986    {
6987      if (fmt[i] == 'e')
6988	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6989      else if (fmt[i] == 'E')
6990	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6991	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6992						cond, reg, val));
6993    }
6994
6995  return x;
6996}
6997
6998/* See if X and Y are equal for the purposes of seeing if we can rewrite an
6999   assignment as a field assignment.  */
7000
7001static int
7002rtx_equal_for_field_assignment_p (x, y)
7003     rtx x;
7004     rtx y;
7005{
7006  if (x == y || rtx_equal_p (x, y))
7007    return 1;
7008
7009  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7010    return 0;
7011
7012  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7013     Note that all SUBREGs of MEM are paradoxical; otherwise they
7014     would have been rewritten.  */
7015  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7016      && GET_CODE (SUBREG_REG (y)) == MEM
7017      && rtx_equal_p (SUBREG_REG (y),
7018		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7019    return 1;
7020
7021  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7022      && GET_CODE (SUBREG_REG (x)) == MEM
7023      && rtx_equal_p (SUBREG_REG (x),
7024		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7025    return 1;
7026
7027  /* We used to see if get_last_value of X and Y were the same but that's
7028     not correct.  In one direction, we'll cause the assignment to have
7029     the wrong destination and in the other, we'll import a register into
7030     this insn that might already have been dead.  So fail if none of the
7031     above cases are true.  */
7032  return 0;
7033}
7034
7035/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7036   Return that assignment if so.
7037
7038   We only handle the most common cases.  */
7039
7040static rtx
7041make_field_assignment (x)
7042     rtx x;
7043{
7044  rtx dest = SET_DEST (x);
7045  rtx src = SET_SRC (x);
7046  rtx assign;
7047  rtx rhs, lhs;
7048  HOST_WIDE_INT c1;
7049  int pos, len;
7050  rtx other;
7051  enum machine_mode mode;
7052
7053  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7054     a clear of a one-bit field.  We will have changed it to
7055     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7056     for a SUBREG.  */
7057
7058  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7059      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7060      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7061      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7062    {
7063      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7064				1, 1, 1, 0);
7065      if (assign != 0)
7066	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7067      return x;
7068    }
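
  /* E.g., (set A (and (rotate (const_int -2) POS) A)) becomes
     (set (zero_extract A (const_int 1) POS) (const_int 0)), clearing the
     selected bit in place.  */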
7069
7070  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7071	   && subreg_lowpart_p (XEXP (src, 0))
7072	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7073	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7074	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7075	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7076	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7077    {
7078      assign = make_extraction (VOIDmode, dest, 0,
7079				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7080				1, 1, 1, 0);
7081      if (assign != 0)
7082	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7083      return x;
7084    }
7085
7086  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7087     one-bit field.  */
7088  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7089	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7090	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7091    {
7092      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7093				1, 1, 1, 0);
7094      if (assign != 0)
7095	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7096      return x;
7097    }
7098
7099  /* The other case we handle is assignments into a constant-position
7100     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7101     a mask that has all one bits except for a group of zero bits and
7102     OTHER is known to have zeros where C1 has ones, this is such an
7103     assignment.  Compute the position and length from C1.  Shift OTHER
7104     to the appropriate position, force it to the required mode, and
7105     make the extraction.  Check for the AND in both operands.  */
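
  /* E.g. (SImode, illustrative): C1 == 0xffff00ff selects an 8-bit field
     at bit 8, since ~C1 has ones exactly there; OTHER is shifted right
     8 bits and stored through (zero_extract DEST 8 8).  */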
7106
7107  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7108    return x;
7109
7110  rhs = expand_compound_operation (XEXP (src, 0));
7111  lhs = expand_compound_operation (XEXP (src, 1));
7112
7113  if (GET_CODE (rhs) == AND
7114      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7115      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7116    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7117  else if (GET_CODE (lhs) == AND
7118	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7119	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7120    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7121  else
7122    return x;
7123
7124  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7125  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7126      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7127      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7128    return x;
7129
7130  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7131  if (assign == 0)
7132    return x;
7133
7134  /* The mode to use for the source is the mode of the assignment, or of
7135     what is inside a possible STRICT_LOW_PART.  */
7136  mode = (GET_CODE (assign) == STRICT_LOW_PART
7137	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7138
7139  /* Shift OTHER right POS places and make it the source, restricting it
7140     to the proper length and mode.  */
7141
7142  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7143					     GET_MODE (src), other, pos),
7144		       mode,
7145		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7146		       ? GET_MODE_MASK (mode)
7147		       : ((HOST_WIDE_INT) 1 << len) - 1,
7148		       dest, 0);
7149
7150  return gen_rtx_combine (SET, VOIDmode, assign, src);
7151}
7152
7153/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7154   if so.  */
7155
7156static rtx
7157apply_distributive_law (x)
7158     rtx x;
7159{
7160  enum rtx_code code = GET_CODE (x);
7161  rtx lhs, rhs, other;
7162  rtx tem;
7163  enum rtx_code inner_code;
7164
7165  /* Distributivity is not true for floating point.
7166     It can change the value.  So don't do it.
7167     -- rms and moshier@world.std.com.  */
7168  if (FLOAT_MODE_P (GET_MODE (x)))
7169    return x;
7170
7171  /* The outer operation can only be one of the following:  */
7172  if (code != IOR && code != AND && code != XOR
7173      && code != PLUS && code != MINUS)
7174    return x;
7175
7176  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7177
7178  /* If either operand is a primitive we can't do anything, so get out
7179     fast.  */
7180  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7181      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7182    return x;
7183
7184  lhs = expand_compound_operation (lhs);
7185  rhs = expand_compound_operation (rhs);
7186  inner_code = GET_CODE (lhs);
7187  if (inner_code != GET_CODE (rhs))
7188    return x;
7189
7190  /* See if the inner and outer operations distribute.  */
7191  switch (inner_code)
7192    {
7193    case LSHIFTRT:
7194    case ASHIFTRT:
7195    case AND:
7196    case IOR:
7197      /* These all distribute except over PLUS.  */
7198      if (code == PLUS || code == MINUS)
7199	return x;
7200      break;
7201
7202    case MULT:
7203      if (code != PLUS && code != MINUS)
7204	return x;
7205      break;
7206
7207    case ASHIFT:
7208      /* This is also a multiply, so it distributes over everything.  */
7209      break;
7210
7211    case SUBREG:
7212      /* Non-paradoxical SUBREGs distribute over all operations, provided
7213	 the inner modes and word numbers are the same, this is an extraction
7214	 of a low-order part, we don't convert an fp operation to int or
7215	 vice versa, and we would not be converting a single-word
7216	 operation into a multi-word operation.  The latter test is not
7217	 required, but it prevents generating unneeded multi-word operations.
7218	 Some of the previous tests are redundant given the latter test, but
7219	 are retained because they are required for correctness.
7220
7221	 We produce the result slightly differently in this case.  */
7222
7223      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7224	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7225	  || ! subreg_lowpart_p (lhs)
7226	  || (GET_MODE_CLASS (GET_MODE (lhs))
7227	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7228	  || (GET_MODE_SIZE (GET_MODE (lhs))
7229	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7230	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7231	return x;
7232
7233      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7234			SUBREG_REG (lhs), SUBREG_REG (rhs));
7235      return gen_lowpart_for_combine (GET_MODE (x), tem);
7236
7237    default:
7238      return x;
7239    }
7240
7241  /* Set LHS and RHS to the inner operands (A and B in the example
7242     above) and set OTHER to the common operand (C in the example).
7243     There is only one way to do this unless the inner operation is
7244     commutative.  */
7245  if (GET_RTX_CLASS (inner_code) == 'c'
7246      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7247    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7248  else if (GET_RTX_CLASS (inner_code) == 'c'
7249	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7250    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7251  else if (GET_RTX_CLASS (inner_code) == 'c'
7252	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7253    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7254  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7255    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7256  else
7257    return x;
7258
7259  /* Form the new inner operation, seeing if it simplifies first.  */
7260  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7261
7262  /* There is one exception to the general way of distributing:
7263     (a | b) ^ (a | c) -> (~a) & (b ^ c)  */
7264  if (code == XOR && inner_code == IOR)
7265    {
7266      inner_code = AND;
7267      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7268    }
7269
7270  /* We may be able to continue distributing the result, so call
7271     ourselves recursively on the inner operation before forming the
7272     outer operation, which we return.  */
7273  return gen_binary (inner_code, GET_MODE (x),
7274		     apply_distributive_law (tem), other);
7275}
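
/* Editor's sketch, not part of the compiler: the transformation above
   restated on host integers.  This hypothetical self-test checks both
   the MULT case and the XOR-over-IOR exception exhaustively for 8-bit
   operands; it is kept under `#if 0' so it is never compiled.  */
#if 0
#include <assert.h>

static void
check_distributive_law ()
{
  unsigned int a, b, c;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
	{
	  /* (a * c) + (b * c) == (a + b) * c, modulo 2**8.  */
	  assert ((unsigned char) (a * c + b * c)
		  == (unsigned char) ((a + b) * c));

	  /* The exception: (a | b) ^ (a | c) == (~a) & (b ^ c).  */
	  assert (((a | b) ^ (a | c)) == (~a & (b ^ c) & 0xff));
	}
}
#endif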
7276
7277/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7278   in MODE.
7279
7280   Return an equivalent form, if different from X.  Otherwise, return X.  If
7281   X is zero, we are to always construct the equivalent form.  */
7282
7283static rtx
7284simplify_and_const_int (x, mode, varop, constop)
7285     rtx x;
7286     enum machine_mode mode;
7287     rtx varop;
7288     unsigned HOST_WIDE_INT constop;
7289{
7290  unsigned HOST_WIDE_INT nonzero;
7291  int width = GET_MODE_BITSIZE (mode);
7292  int i;
7293
7294  /* Simplify VAROP knowing that we will only be looking at some of the
7295     bits in it.  */
7296  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7297
7298  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7299     CONST_INT, we are done.  */
7300  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7301    return varop;
7302
7303  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7304     a call to nonzero_bits, here we don't care about bits outside
7305     MODE.  */
7306
7307  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7308
7309  /* If this would be an entire word for the target, but is not for
7310     the host, then sign-extend on the host so that the number will look
7311     the same way on the host that it would on the target.
7312
7313     For example, when building a 64 bit alpha hosted 32 bit sparc
7314     targeted compiler, then we want the 32 bit unsigned value -1 to be
7315     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7316     The latter confuses the sparc backend.  */
7317
7318  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7319      && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7320    nonzero |= ((HOST_WIDE_INT) (-1) << width);
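
  /* Concrete instance (editor's note): with a 64 bit HOST_WIDE_INT and
     a 32 bit target word, the 32 bit constant 0xffffffff has its sign
     bit set, so it becomes the host value -1 (all 64 bits set) rather
     than 0x00000000ffffffff.  */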
7321
7322  /* Turn off all bits in the constant that are known to already be zero.
7323     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7324     which is tested below.  */
7325
7326  constop &= nonzero;
7327
7328  /* If we don't have any bits left, return zero.  */
7329  if (constop == 0)
7330    return const0_rtx;
7331
7332  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7333     a power of two, we can replace this with an ASHIFT.  */
7334  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7335      && (i = exact_log2 (constop)) >= 0)
7336    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7337
7338  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7339     or XOR, then try to apply the distributive law.  This may eliminate
7340     operations if either branch can be simplified because of the AND.
7341     It may also make some cases more complex, but those cases probably
7342     won't match a pattern either with or without this.  */
7343
7344  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7345    return
7346      gen_lowpart_for_combine
7347	(mode,
7348	 apply_distributive_law
7349	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7350		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7351					      XEXP (varop, 0), constop),
7352		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7353					      XEXP (varop, 1), constop))));
7354
7355  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7356     if we already had one (just check for the simplest cases).  */
7357  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7358      && GET_MODE (XEXP (x, 0)) == mode
7359      && SUBREG_REG (XEXP (x, 0)) == varop)
7360    varop = XEXP (x, 0);
7361  else
7362    varop = gen_lowpart_for_combine (mode, varop);
7363
7364  /* If we can't make the SUBREG, try to return what we were given.  */
7365  if (GET_CODE (varop) == CLOBBER)
7366    return x ? x : varop;
7367
7368  /* If we are only masking insignificant bits, return VAROP.  */
7369  if (constop == nonzero)
7370    x = varop;
7371
7372  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
7373  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7374    x = gen_binary (AND, mode, varop, GEN_INT (constop));
7375
7376  else
7377    {
7378      if (GET_CODE (XEXP (x, 1)) != CONST_INT
7379	  || INTVAL (XEXP (x, 1)) != constop)
7380	SUBST (XEXP (x, 1), GEN_INT (constop));
7381
7382      SUBST (XEXP (x, 0), varop);
7383    }
7384
7385  return x;
7386}
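
/* Editor's sketch of the central idea above, on host integers rather
   than RTL; the function name is hypothetical.  Once the bits that can
   possibly be nonzero in VAROP are known, the AND constant can be
   trimmed to them, and the AND disappears entirely when nothing is
   trimmed away.  */
#if 0
static unsigned HOST_WIDE_INT
and_const_example (varop_value, nonzero, constop)
     unsigned HOST_WIDE_INT varop_value, nonzero, constop;
{
  constop &= nonzero;		/* Known-zero bits need no masking.  */
  if (constop == 0)
    return 0;			/* The whole AND yields zero.  */
  if (constop == nonzero)
    return varop_value;		/* Only known-zero bits are masked;
				   the AND is a no-op.  */
  return varop_value & constop;	/* Otherwise keep a (smaller) AND.  */
}
#endif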
7387
7388/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7389   We don't let nonzero_bits recur into num_sign_bit_copies, because that
7390   is less useful.  We can't allow both, because that results in exponential
7391   run time recursion.  There is a nullstone testcase that triggered
7392   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
7393#define num_sign_bit_copies()
7394
7395/* Given an expression, X, compute which bits in X can be non-zero.
7396   We don't care about bits outside of those defined in MODE.
7397
7398   For most X this is simply GET_MODE_MASK (MODE), but if X is
7399   a shift, AND, or zero_extract, we can do better.  */
7400
7401static unsigned HOST_WIDE_INT
7402nonzero_bits (x, mode)
7403     rtx x;
7404     enum machine_mode mode;
7405{
7406  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7407  unsigned HOST_WIDE_INT inner_nz;
7408  enum rtx_code code;
7409  int mode_width = GET_MODE_BITSIZE (mode);
7410  rtx tem;
7411
7412  /* For floating-point values, assume all bits are needed.  */
7413  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7414    return nonzero;
7415
7416  /* If X is wider than MODE, use its mode instead.  */
7417  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7418    {
7419      mode = GET_MODE (x);
7420      nonzero = GET_MODE_MASK (mode);
7421      mode_width = GET_MODE_BITSIZE (mode);
7422    }
7423
7424  if (mode_width > HOST_BITS_PER_WIDE_INT)
7425    /* Our only callers in this case look for single bit values.  So
7426       just return the mode mask.  Those tests will then be false.  */
7427    return nonzero;
7428
7429#ifndef WORD_REGISTER_OPERATIONS
7430  /* If MODE is wider than X, but both are a single word for both the host
7431     and target machines, we can compute this from which bits of the
7432     object might be nonzero in its own mode, taking into account the fact
7433     that on many CISC machines, accessing an object in a wider mode
7434     causes the high-order bits to become undefined.  So they are
7435     not known to be zero.  */
7436
7437  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7438      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7439      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7440      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7441    {
7442      nonzero &= nonzero_bits (x, GET_MODE (x));
7443      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7444      return nonzero;
7445    }
7446#endif
7447
7448  code = GET_CODE (x);
7449  switch (code)
7450    {
7451    case REG:
7452#ifdef POINTERS_EXTEND_UNSIGNED
7453      /* If pointers extend unsigned and this is a pointer in Pmode, say that
7454	 all the bits above ptr_mode are known to be zero.  */
7455      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7456	  && REGNO_POINTER_FLAG (REGNO (x)))
7457	nonzero &= GET_MODE_MASK (ptr_mode);
7458#endif
7459
7460#ifdef STACK_BOUNDARY
7461      /* If this is the stack pointer, we may know something about its
7462	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
7463	 stack to be momentarily aligned only to that amount, so we pick
7464	 the least alignment.  */
7465
7466      /* We can't check for arg_pointer_rtx here, because it is not
7467	 guaranteed to have as much alignment as the stack pointer.
7468	 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7469	 alignment but the argument pointer has only 64 bit alignment.  */
7470
7471      if ((x == frame_pointer_rtx
7472	   || x == stack_pointer_rtx
7473	   || x == hard_frame_pointer_rtx
7474	   || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7475	       && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7476#ifdef STACK_BIAS
7477	  && !STACK_BIAS
7478#endif
7479	      )
7480	{
7481	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7482
7483#ifdef PUSH_ROUNDING
7484	  if (REGNO (x) == STACK_POINTER_REGNUM)
7485	    sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7486#endif
7487
7488	  /* We must return here, otherwise we may get a worse result from
7489	     one of the choices below.  There is nothing useful below as
7490	     far as the stack pointer is concerned.  */
7491	  return nonzero & ~ (sp_alignment - 1);
7492	}
7493#endif
7494
7495      /* If X is a register whose nonzero bits value is current, use it.
7496	 Otherwise, if X is a register whose value we can find, use that
7497	 value.  Otherwise, use the previously-computed global nonzero bits
7498	 for this register.  */
7499
7500      if (reg_last_set_value[REGNO (x)] != 0
7501	  && reg_last_set_mode[REGNO (x)] == mode
7502	  && (REG_N_SETS (REGNO (x)) == 1
7503	      || reg_last_set_label[REGNO (x)] == label_tick)
7504	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7505	return reg_last_set_nonzero_bits[REGNO (x)];
7506
7507      tem = get_last_value (x);
7508
7509      if (tem)
7510	{
7511#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7512	  /* If X is narrower than MODE and TEM is a non-negative
7513	     constant that would appear negative in the mode of X,
7514	     sign-extend it for use in reg_nonzero_bits because some
7515	     machines (maybe most) will actually do the sign-extension
7516	     and this is the conservative approach.
7517
7518	     ??? For 2.5, try to tighten up the MD files in this regard
7519	     instead of this kludge.  */
7520
7521	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7522	      && GET_CODE (tem) == CONST_INT
7523	      && INTVAL (tem) > 0
7524	      && 0 != (INTVAL (tem)
7525		       & ((HOST_WIDE_INT) 1
7526			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7527	    tem = GEN_INT (INTVAL (tem)
7528			   | ((HOST_WIDE_INT) (-1)
7529			      << GET_MODE_BITSIZE (GET_MODE (x))));
7530#endif
7531	  return nonzero_bits (tem, mode);
7532	}
7533      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7534	return reg_nonzero_bits[REGNO (x)] & nonzero;
7535      else
7536	return nonzero;
7537
7538    case CONST_INT:
7539#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7540      /* If X is negative in MODE but positive on the host, sign-extend.  */
7541      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7542	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7543	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7544#endif
7545
7546      return INTVAL (x);
7547
7548    case MEM:
7549#ifdef LOAD_EXTEND_OP
7550      /* In many, if not most, RISC machines, reading a byte from memory
7551	 zeros the rest of the register.  Noticing that fact saves a lot
7552	 of extra zero-extends.  */
7553      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7554	nonzero &= GET_MODE_MASK (GET_MODE (x));
7555#endif
7556      break;
7557
7558    case EQ:  case NE:
7559    case GT:  case GTU:
7560    case LT:  case LTU:
7561    case GE:  case GEU:
7562    case LE:  case LEU:
7563
7564      /* If this produces an integer result, we know which bits are set.
7565	 Code here used to clear bits outside the mode of X, but that is
7566	 now done above.  */
7567
7568      if (GET_MODE_CLASS (mode) == MODE_INT
7569	  && mode_width <= HOST_BITS_PER_WIDE_INT)
7570	nonzero = STORE_FLAG_VALUE;
7571      break;
7572
7573    case NEG:
7574#if 0
7575      /* Disabled to avoid exponential mutual recursion between nonzero_bits
7576	 and num_sign_bit_copies.  */
7577      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7578	  == GET_MODE_BITSIZE (GET_MODE (x)))
7579	nonzero = 1;
7580#endif
7581
7582      if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
7583	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
7584      break;
7585
7586    case ABS:
7587#if 0
7588      /* Disabled to avoid exponential mutual recursion between nonzero_bits
7589	 and num_sign_bit_copies.  */
7590      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7591	  == GET_MODE_BITSIZE (GET_MODE (x)))
7592	nonzero = 1;
7593#endif
7594      break;
7595
7596    case TRUNCATE:
7597      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7598      break;
7599
7600    case ZERO_EXTEND:
7601      nonzero &= nonzero_bits (XEXP (x, 0), mode);
7602      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7603	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7604      break;
7605
7606    case SIGN_EXTEND:
7607      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7608	 Otherwise, show that all the bits in the outer mode but not in the
7609	 inner mode may be non-zero.  */
7610      inner_nz = nonzero_bits (XEXP (x, 0), mode);
7611      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7612	{
7613	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7614	  if (inner_nz
7615	      & (((HOST_WIDE_INT) 1
7616		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
7617	    inner_nz |= (GET_MODE_MASK (mode)
7618			  & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7619	}
7620
7621      nonzero &= inner_nz;
7622      break;
7623
7624    case AND:
7625      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7626		  & nonzero_bits (XEXP (x, 1), mode));
7627      break;
7628
7629    case XOR:   case IOR:
7630    case UMIN:  case UMAX:  case SMIN:  case SMAX:
7631      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7632		  | nonzero_bits (XEXP (x, 1), mode));
7633      break;
7634
7635    case PLUS:  case MINUS:
7636    case MULT:
7637    case DIV:   case UDIV:
7638    case MOD:   case UMOD:
7639      /* We can apply the rules of arithmetic to compute the number of
7640	 high- and low-order zero bits of these operations.  We start by
7641	 computing the width (position of the highest-order non-zero bit)
7642	 and the number of low-order zero bits for each value.  */
7643      {
7644	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7645	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7646	int width0 = floor_log2 (nz0) + 1;
7647	int width1 = floor_log2 (nz1) + 1;
7648	int low0 = floor_log2 (nz0 & -nz0);
7649	int low1 = floor_log2 (nz1 & -nz1);
7650	HOST_WIDE_INT op0_maybe_minusp
7651	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7652	HOST_WIDE_INT op1_maybe_minusp
7653	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7654	int result_width = mode_width;
7655	int result_low = 0;
7656
7657	switch (code)
7658	  {
7659	  case PLUS:
7660#ifdef STACK_BIAS
7661	    if (STACK_BIAS
7662	        && (XEXP (x, 0) == stack_pointer_rtx
7663	            || XEXP (x, 0) == frame_pointer_rtx)
7664	        && GET_CODE (XEXP (x, 1)) == CONST_INT)
7665	      {
7666		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7667
7668	        nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
7669	        nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
7670	        width0 = floor_log2 (nz0) + 1;
7671	        width1 = floor_log2 (nz1) + 1;
7672	        low0 = floor_log2 (nz0 & -nz0);
7673	        low1 = floor_log2 (nz1 & -nz1);
7674	      }
7675#endif
7676	    result_width = MAX (width0, width1) + 1;
7677	    result_low = MIN (low0, low1);
7678	    break;
7679	  case MINUS:
7680	    result_low = MIN (low0, low1);
7681	    break;
7682	  case MULT:
7683	    result_width = width0 + width1;
7684	    result_low = low0 + low1;
7685	    break;
7686	  case DIV:
7687	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7688	      result_width = width0;
7689	    break;
7690	  case UDIV:
7691	    result_width = width0;
7692	    break;
7693	  case MOD:
7694	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
7695	      result_width = MIN (width0, width1);
7696	    result_low = MIN (low0, low1);
7697	    break;
7698	  case UMOD:
7699	    result_width = MIN (width0, width1);
7700	    result_low = MIN (low0, low1);
7701	    break;
7702	  default:
7703	    abort ();
7704	  }
7705
7706	if (result_width < mode_width)
7707	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7708
7709	if (result_low > 0)
7710	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7711      }
7712      break;
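
      /* Worked example (editor's note): if NZ0 == 0x3c (width 6, two
	 low-order zero bits) and NZ1 == 0x0c (width 4, two low-order
	 zero bits), then for MULT result_width = 6 + 4 = 10 and
	 result_low = 2 + 2 = 4, while for PLUS result_width =
	 MAX (6, 4) + 1 = 7 and result_low = MIN (2, 2) = 2.  */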
7713
7714    case ZERO_EXTRACT:
7715      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7716	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7717	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7718      break;
7719
7720    case SUBREG:
7721      /* If this is a SUBREG formed for a promoted variable that has
7722	 been zero-extended, we know that at least the high-order bits
7723	 are zero, though others might be too.  */
7724
7725      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7726	nonzero = (GET_MODE_MASK (GET_MODE (x))
7727		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7728
7729      /* If the inner mode is a single word for both the host and target
7730	 machines, we can compute this from which bits of the inner
7731	 object might be nonzero.  */
7732      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7733	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7734	      <= HOST_BITS_PER_WIDE_INT))
7735	{
7736	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7737
7738#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
7739	  /* If this is a typical RISC machine, we only have to worry
7740	     about the way loads are extended.  */
7741	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
7742	      ? (nonzero
7743		 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
7744	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
7745#endif
7746	    {
7747	      /* On many CISC machines, accessing an object in a wider mode
7748		 causes the high-order bits to become undefined.  So they are
7749		 not known to be zero.  */
7750	      if (GET_MODE_SIZE (GET_MODE (x))
7751		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7752		nonzero |= (GET_MODE_MASK (GET_MODE (x))
7753			    & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7754	    }
7755	}
7756      break;
7757
7758    case ASHIFTRT:
7759    case LSHIFTRT:
7760    case ASHIFT:
7761    case ROTATE:
7762      /* The nonzero bits are in two classes: any bits within MODE
7763	 that aren't in GET_MODE (x) are always significant.  The rest of the
7764	 nonzero bits are those that are significant in the operand of
7765	 the shift when shifted the appropriate number of bits.  This
7766	 shows that high-order bits are cleared by the right shift and
7767	 low-order bits by left shifts.  */
7768      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7769	  && INTVAL (XEXP (x, 1)) >= 0
7770	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7771	{
7772	  enum machine_mode inner_mode = GET_MODE (x);
7773	  int width = GET_MODE_BITSIZE (inner_mode);
7774	  int count = INTVAL (XEXP (x, 1));
7775	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7776	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7777	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7778	  unsigned HOST_WIDE_INT outer = 0;
7779
7780	  if (mode_width > width)
7781	    outer = (op_nonzero & nonzero & ~ mode_mask);
7782
7783	  if (code == LSHIFTRT)
7784	    inner >>= count;
7785	  else if (code == ASHIFTRT)
7786	    {
7787	      inner >>= count;
7788
7789	      /* If the sign bit may have been nonzero before the shift, we
7790		 need to mark all the places it could have been copied to
7791		 by the shift as possibly nonzero.  */
7792	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7793		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7794	    }
7795	  else if (code == ASHIFT)
7796	    inner <<= count;
7797	  else
7798	    inner = ((inner << (count % width)
7799		      | (inner >> (width - (count % width)))) & mode_mask);
7800
7801	  nonzero &= (outer | inner);
7802	}
7803      break;
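
      /* Worked example (editor's note): for (ashiftrt:QI X 3) where X
	 has nonzero bits 0xf0, INNER becomes 0xf0 >> 3 == 0x1e; since
	 bit 4, the shifted copy of the sign bit, may be set, bits 5
	 through 7 are marked possibly nonzero as well, giving 0xfe.  */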
7804
7805    case FFS:
7806      /* This is at most the number of bits in the mode.  */
7807      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7808      break;
7809
7810    case IF_THEN_ELSE:
7811      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7812		  | nonzero_bits (XEXP (x, 2), mode));
7813      break;
7814
7815    default:
7816      break;
7817    }
7818
7819  return nonzero;
7820}
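
/* Editor's sketch: a stripped-down analogue of the function above for
   host integers, covering only the constant, AND, IOR, and LSHIFTRT
   cases.  The expression representation is hypothetical; only the bit
   propagation rules mirror nonzero_bits.  */
#if 0
struct mini_expr
{
  enum { ME_CONST, ME_AND, ME_IOR, ME_LSHIFTRT, ME_UNKNOWN } code;
  unsigned HOST_WIDE_INT value;	/* Constant value, or shift count.  */
  struct mini_expr *op0, *op1;
};

static unsigned HOST_WIDE_INT
mini_nonzero_bits (e, mask)
     struct mini_expr *e;
     unsigned HOST_WIDE_INT mask;
{
  switch (e->code)
    {
    case ME_CONST:
      return e->value & mask;
    case ME_AND:
      return (mini_nonzero_bits (e->op0, mask)
	      & mini_nonzero_bits (e->op1, mask));
    case ME_IOR:
      return (mini_nonzero_bits (e->op0, mask)
	      | mini_nonzero_bits (e->op1, mask));
    case ME_LSHIFTRT:
      /* High-order bits are cleared by a logical right shift.  */
      return (mini_nonzero_bits (e->op0, mask) >> e->value) & mask;
    default:
      return mask;		/* Assume every bit may be set.  */
    }
}
#endif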
7821
7822/* See the macro definition above.  */
7823#undef num_sign_bit_copies
7824
7825/* Return the number of bits at the high-order end of X that are known to
7826   be equal to the sign bit.  X will be used in mode MODE; if MODE is
7827   VOIDmode, X will be used in its own mode.  The returned value will always
7828   be between 1 and the number of bits in MODE.  */
7829
7830static int
7831num_sign_bit_copies (x, mode)
7832     rtx x;
7833     enum machine_mode mode;
7834{
7835  enum rtx_code code = GET_CODE (x);
7836  int bitwidth;
7837  int num0, num1, result;
7838  unsigned HOST_WIDE_INT nonzero;
7839  rtx tem;
7840
7841  /* If we weren't given a mode, use the mode of X.  If the mode is still
7842     VOIDmode, we don't know anything.  Likewise if one of the modes is
7843     floating-point.  */
7844
7845  if (mode == VOIDmode)
7846    mode = GET_MODE (x);
7847
7848  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7849    return 1;
7850
7851  bitwidth = GET_MODE_BITSIZE (mode);
7852
7853  /* For a smaller object, just ignore the high bits.  */
7854  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7855    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7856		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7857
7858  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7859    {
7860#ifndef WORD_REGISTER_OPERATIONS
7861  /* If this machine does not do all register operations on the entire
7862     register and MODE is wider than the mode of X, we can say nothing
7863     at all about the high-order bits.  */
7864      return 1;
7865#else
7866      /* Likewise on machines that do, if the mode of the object is smaller
7867	 than a word and loads of that size don't sign extend, we can say
7868	 nothing about the high order bits.  */
7869      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7870#ifdef LOAD_EXTEND_OP
7871	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7872#endif
7873	  )
7874	return 1;
7875#endif
7876    }
7877
7878  switch (code)
7879    {
7880    case REG:
7881
7882#ifdef POINTERS_EXTEND_UNSIGNED
7883      /* If pointers extend signed and this is a pointer in Pmode, say that
7884	 all the bits above ptr_mode are known to be sign bit copies.  */
7885      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7886	  && REGNO_POINTER_FLAG (REGNO (x)))
7887	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7888#endif
7889
7890      if (reg_last_set_value[REGNO (x)] != 0
7891	  && reg_last_set_mode[REGNO (x)] == mode
7892	  && (REG_N_SETS (REGNO (x)) == 1
7893	      || reg_last_set_label[REGNO (x)] == label_tick)
7894	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7895	return reg_last_set_sign_bit_copies[REGNO (x)];
7896
7897      tem = get_last_value (x);
7898      if (tem != 0)
7899	return num_sign_bit_copies (tem, mode);
7900
7901      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7902	return reg_sign_bit_copies[REGNO (x)];
7903      break;
7904
7905    case MEM:
7906#ifdef LOAD_EXTEND_OP
7907      /* Some RISC machines sign-extend all loads of smaller than a word.  */
7908      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7909	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7910#endif
7911      break;
7912
7913    case CONST_INT:
7914      /* If the constant is negative, take its 1's complement and remask.
7915	 Then see how many zero bits we have.  */
7916      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7917      if (bitwidth <= HOST_BITS_PER_WIDE_INT
7918	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7919	nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7920
7921      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
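
      /* Worked example (editor's note): in an 8 bit mode, the constant
	 0xf0 masks to 0xf0, has its sign bit set, and complements to
	 0x0f; floor_log2 (0x0f) is 3, so we return 8 - 3 - 1 == 4, the
	 four leading one bits of 11110000.  */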
7922
7923    case SUBREG:
7924      /* If this is a SUBREG for a promoted object that is sign-extended
7925	 and we are looking at it in a wider mode, we know that at least the
7926	 high-order bits are sign bit copies.  */
7927
7928      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7929	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7930		    num_sign_bit_copies (SUBREG_REG (x), mode));
7931
7932      /* For a smaller object, just ignore the high bits.  */
7933      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7934	{
7935	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7936	  return MAX (1, (num0
7937			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7938			     - bitwidth)));
7939	}
7940
7941#ifdef WORD_REGISTER_OPERATIONS
7942#ifdef LOAD_EXTEND_OP
7943      /* For paradoxical SUBREGs on machines where all register operations
7944	 affect the entire register, just look inside.  Note that we are
7945	 passing MODE to the recursive call, so the number of sign bit copies
7946	 will remain relative to that mode, not the inner mode.  */
7947
7948      /* This works only if loads sign extend.  Otherwise, if we get a
7949	 reload for the inner part, it may be loaded from the stack, and
7950	 then we lose all sign bit copies that existed before the store
7951	 to the stack.  */
7952
7953      if ((GET_MODE_SIZE (GET_MODE (x))
7954	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7955	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7956	return num_sign_bit_copies (SUBREG_REG (x), mode);
7957#endif
7958#endif
7959      break;
7960
7961    case SIGN_EXTRACT:
7962      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7963	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7964      break;
7965
7966    case SIGN_EXTEND:
7967      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7968	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7969
7970    case TRUNCATE:
7971      /* For a smaller object, just ignore the high bits.  */
7972      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7973      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7974			      - bitwidth)));
7975
7976    case NOT:
7977      return num_sign_bit_copies (XEXP (x, 0), mode);
7978
7979    case ROTATE:       case ROTATERT:
7980      /* If we are rotating left by a number of bits less than the number
7981	 of sign bit copies, we can just subtract that amount from the
7982	 number.  */
7983      if (GET_CODE (XEXP (x, 1)) == CONST_INT
7984	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7985	{
7986	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7987	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7988				 : bitwidth - INTVAL (XEXP (x, 1))));
7989	}
7990      break;
7991
7992    case NEG:
7993      /* In general, this subtracts one sign bit copy.  But if the value
7994	 is known to be positive, the number of sign bit copies is the
7995	 same as that of the input.  Finally, if the input has just one bit
7996	 that might be nonzero, all the bits are copies of the sign bit.  */
7997      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7998      if (bitwidth > HOST_BITS_PER_WIDE_INT)
7999	return num0 > 1 ? num0 - 1 : 1;
8000
8001      nonzero = nonzero_bits (XEXP (x, 0), mode);
8002      if (nonzero == 1)
8003	return bitwidth;
8004
8005      if (num0 > 1
8006	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8007	num0--;
8008
8009      return num0;
8010
8011    case IOR:   case AND:   case XOR:
8012    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8013      /* Logical operations will preserve the number of sign-bit copies.
8014	 MIN and MAX operations always return one of the operands.  */
8015      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8016      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8017      return MIN (num0, num1);
8018
8019    case PLUS:  case MINUS:
8020      /* For addition and subtraction, we can have a 1-bit carry.  However,
8021	 if we are subtracting 1 from a positive number, there will not
8022	 be such a carry.  Furthermore, if the positive number is known to
8023	 be 0 or 1, we know the result is either -1 or 0.  */
8024
8025      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8026	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8027	{
8028	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8029	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8030	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8031		    : bitwidth - floor_log2 (nonzero) - 1);
8032	}
8033
8034      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8035      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8036      return MAX (1, MIN (num0, num1) - 1);
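
      /* Worked example (editor's note): two 8 bit values each known to
	 have at least 3 sign bit copies lie in [-32, 31]; their sum
	 lies in [-64, 62] and so still has at least 2 sign bit copies,
	 which is MIN (num0, num1) - 1.  */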
8037
8038    case MULT:
8039      /* The number of bits of the product is the sum of the number of
8040	 bits of both terms.  However, unless one of the terms is known
8041	 to be positive, we must allow for an additional bit since negating
8042	 a negative number can remove one sign bit copy.  */
8043
8044      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8045      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8046
8047      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8048      if (result > 0
8049	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8050	      || (((nonzero_bits (XEXP (x, 0), mode)
8051		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8052		  && ((nonzero_bits (XEXP (x, 1), mode)
8053		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8054	result--;
8055
8056      return MAX (1, result);
8057
8058    case UDIV:
8059      /* The result must be <= the first operand.  If the first operand
8060         has the high bit set, we know nothing about the number of sign
8061         bit copies.  */
8062      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8063	return 1;
8064      else if ((nonzero_bits (XEXP (x, 0), mode)
8065		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8066	return 1;
8067      else
8068	return num_sign_bit_copies (XEXP (x, 0), mode);
8069
8070    case UMOD:
8071      /* The result must be <= the second operand.  */
8072      return num_sign_bit_copies (XEXP (x, 1), mode);
8073
8074    case DIV:
8075      /* Similar to unsigned division, except that we have to worry about
8076	 the case where the divisor is negative, in which case we have
8077	 to add 1.  */
8078      result = num_sign_bit_copies (XEXP (x, 0), mode);
8079      if (result > 1
8080	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8081	      || (nonzero_bits (XEXP (x, 1), mode)
8082		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8083	result--;
8084
8085      return result;
8086
8087    case MOD:
8088      result = num_sign_bit_copies (XEXP (x, 1), mode);
8089      if (result > 1
8090	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8091	      || (nonzero_bits (XEXP (x, 1), mode)
8092		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8093	result--;
8094
8095      return result;
8096
8097    case ASHIFTRT:
8098      /* Shifts by a constant add to the number of bits equal to the
8099	 sign bit.  */
8100      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8101      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8102	  && INTVAL (XEXP (x, 1)) > 0)
8103	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8104
8105      return num0;
8106
8107    case ASHIFT:
8108      /* Left shifts destroy copies.  */
8109      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8110	  || INTVAL (XEXP (x, 1)) < 0
8111	  || INTVAL (XEXP (x, 1)) >= bitwidth)
8112	return 1;
8113
8114      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8115      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8116
8117    case IF_THEN_ELSE:
8118      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8119      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8120      return MIN (num0, num1);
8121
8122    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8123    case GEU: case GTU: case LEU: case LTU:
8124      if (STORE_FLAG_VALUE == -1)
8125	return bitwidth;
8126      break;
8127
8128    default:
8129      break;
8130    }
8131
8132  /* If we haven't been able to figure it out by one of the above rules,
8133     see if some of the high-order bits are known to be zero.  If so,
8134     count those bits and return one less than that amount.  If we can't
8135     safely compute the mask for this mode, always return BITWIDTH.  */
8136
8137  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8138    return 1;
8139
8140  nonzero = nonzero_bits (x, mode);
8141  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8142	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8143}
8144
8145/* Return the number of "extended" bits there are in X, when interpreted
8146   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8147   unsigned quantities, this is the number of high-order zero bits.
8148   For signed quantities, this is the number of copies of the sign bit
8149   minus 1.  In both cases, this function returns the number of "spare"
8150   bits.  For example, if two quantities for which this function returns
8151   at least 1 are added, the addition is known not to overflow.
8152
8153   This function will always return 0 unless called during combine, which
8154   implies that it must be called from a define_split.  */
8155
8156int
8157extended_count (x, mode, unsignedp)
8158     rtx x;
8159     enum machine_mode mode;
8160     int unsignedp;
8161{
8162  if (nonzero_sign_valid == 0)
8163    return 0;
8164
8165  return (unsignedp
8166	  ? (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT ? 0
8167	     : (GET_MODE_BITSIZE (mode) - 1
8168		- floor_log2 (nonzero_bits (x, mode))))
8169	  : num_sign_bit_copies (x, mode) - 1);
8170}
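
/* Editor's note, making the overflow remark above concrete: for 8 bit
   quantities, a signed value with extended_count >= 1 has at least two
   sign bit copies and so lies in [-64, 63]; the sum of two such values
   stays within [-128, 126] and cannot wrap.  For unsigned values the
   high bit is clear, so each is at most 127 and the sum at most 254,
   which still fits in 8 bits.  */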
8171
8172/* This function is called from `simplify_shift_const' to merge two
8173   outer operations.  Specifically, we have already found that we need
8174   to perform operation *POP0 with constant *PCONST0 at the outermost
8175   position.  We would now like to also perform OP1 with constant CONST1
8176   (with *POP0 being done last).
8177
8178   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8179   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8180   complement the innermost operand, otherwise it is unchanged.
8181
8182   MODE is the mode in which the operation will be done.  No bits outside
8183   the width of this mode matter.  It is assumed that the width of this mode
8184   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8185
8186   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8187   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8188   result is simply *PCONST0.
8189
8190   If the resulting operation cannot be expressed as one operation, we
8191   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
8192
8193static int
8194merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8195     enum rtx_code *pop0;
8196     HOST_WIDE_INT *pconst0;
8197     enum rtx_code op1;
8198     HOST_WIDE_INT const1;
8199     enum machine_mode mode;
8200     int *pcomp_p;
8201{
8202  enum rtx_code op0 = *pop0;
8203  HOST_WIDE_INT const0 = *pconst0;
8204  int width = GET_MODE_BITSIZE (mode);
8205
8206  const0 &= GET_MODE_MASK (mode);
8207  const1 &= GET_MODE_MASK (mode);
8208
8209  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8210  if (op0 == AND)
8211    const1 &= const0;
8212
8213  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
8214     if OP0 is SET.  */
8215
8216  if (op1 == NIL || op0 == SET)
8217    return 1;
8218
8219  else if (op0 == NIL)
8220    op0 = op1, const0 = const1;
8221
8222  else if (op0 == op1)
8223    {
8224      switch (op0)
8225	{
8226	case AND:
8227	  const0 &= const1;
8228	  break;
8229	case IOR:
8230	  const0 |= const1;
8231	  break;
8232	case XOR:
8233	  const0 ^= const1;
8234	  break;
8235	case PLUS:
8236	  const0 += const1;
8237	  break;
8238	case NEG:
8239	  op0 = NIL;
8240	  break;
8241	default:
8242	  break;
8243	}
8244    }
8245
8246  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8247  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8248    return 0;
8249
8250  /* If the two constants aren't the same, we can't do anything.  The
8251     remaining six cases can all be done.  */
8252  else if (const0 != const1)
8253    return 0;
8254
8255  else
8256    switch (op0)
8257      {
8258      case IOR:
8259	if (op1 == AND)
8260	  /* (a & b) | b == b */
8261	  op0 = SET;
8262	else /* op1 == XOR */
8263	  /* (a ^ b) | b == a | b */
8264	  {;}
8265	break;
8266
8267      case XOR:
8268	if (op1 == AND)
8269	  /* (a & b) ^ b == (~a) & b */
8270	  op0 = AND, *pcomp_p = 1;
8271	else /* op1 == IOR */
8272	  /* (a | b) ^ b == a & ~b */
8273	  op0 = AND, *pconst0 = ~ const0;
8274	break;
8275
8276      case AND:
8277	if (op1 == IOR)
8278	  /* (a | b) & b == b */
8279	  op0 = SET;
8280	else /* op1 == XOR */
8281	  /* (a ^ b) & b == (~a) & b */
8282	  *pcomp_p = 1;
8283	break;
8284      default:
8285	break;
8286      }
8287
8288  /* Check for NO-OP cases.  */
8289  const0 &= GET_MODE_MASK (mode);
8290  if (const0 == 0
8291      && (op0 == IOR || op0 == XOR || op0 == PLUS))
8292    op0 = NIL;
8293  else if (const0 == 0 && op0 == AND)
8294    op0 = SET;
8295  else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
8296    op0 = NIL;
8297
8298  /* If this would be an entire word for the target, but is not for
8299     the host, then sign-extend on the host so that the number will look
8300     the same way on the host that it would on the target.
8301
8302     For example, when building a 64 bit alpha hosted 32 bit sparc
8303     targeted compiler, then we want the 32 bit unsigned value -1 to be
8304     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8305     The latter confuses the sparc backend.  */
8306
8307  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8308      && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8309    const0 |= ((HOST_WIDE_INT) (-1) << width);
8310
8311  *pop0 = op0;
8312  *pconst0 = const0;
8313
8314  return 1;
8315}
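
/* Editor's sketch (hypothetical test code, never compiled): the six
   merge identities used above, checked exhaustively on 8-bit values.
   A stands for the innermost operand and B for the shared constant.  */
#if 0
#include <assert.h>

static void
check_merge_identities ()
{
  unsigned int a, b;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      {
	assert (((a & b) | b) == b);			/* IOR of AND  */
	assert (((a ^ b) | b) == (a | b));		/* IOR of XOR  */
	assert (((a & b) ^ b) == (~a & b & 0xff));	/* XOR of AND  */
	assert (((a | b) ^ b) == (a & ~b & 0xff));	/* XOR of IOR  */
	assert (((a | b) & b) == b);			/* AND of IOR  */
	assert (((a ^ b) & b) == (~a & b & 0xff));	/* AND of XOR  */
      }
}
#endif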
8316
8317/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8318   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
8319   that we started with.
8320
8321   The shift is normally computed in the widest mode we find in VAROP, as
8322   long as it isn't a different number of words than RESULT_MODE.  Exceptions
8323   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
8324
8325static rtx
8326simplify_shift_const (x, code, result_mode, varop, count)
8327     rtx x;
8328     enum rtx_code code;
8329     enum machine_mode result_mode;
8330     rtx varop;
8331     int count;
8332{
8333  enum rtx_code orig_code = code;
8334  int orig_count = count;
8335  enum machine_mode mode = result_mode;
8336  enum machine_mode shift_mode, tmode;
8337  int mode_words
8338    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8339  /* We form (outer_op (code varop count) (outer_const)).  */
8340  enum rtx_code outer_op = NIL;
8341  HOST_WIDE_INT outer_const = 0;
8342  rtx const_rtx;
8343  int complement_p = 0;
8344  rtx new;
8345
8346  /* If we were given an invalid count, don't do anything except exactly
8347     what was requested.  */
8348
8349  if (count < 0 || count > GET_MODE_BITSIZE (mode))
8350    {
8351      if (x)
8352	return x;
8353
8354      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
8355    }
8356
8357  /* Unless one of the branches in the body of this loop does a `continue',
8358     we will `break' out of the loop after the `switch'.  */
8359
8360  while (count != 0)
8361    {
8362      /* If we have an operand of (clobber (const_int 0)), just return that
8363	 value.  */
8364      if (GET_CODE (varop) == CLOBBER)
8365	return varop;
8366
8367      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8368	 here would cause an infinite loop.  */
8369      if (complement_p)
8370	break;
8371
8372      /* Convert ROTATERT to ROTATE.  */
8373      if (code == ROTATERT)
8374	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8375
8376      /* We need to determine what mode we will do the shift in.  If the
8377	 shift is a right shift or a ROTATE, we must always do it in the mode
8378	 it was originally done in.  Otherwise, we can do it in MODE, the
8379	 widest mode encountered.  */
8380      shift_mode
8381	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8382	   ? result_mode : mode);
8383
8384      /* Handle cases where the count is greater than the size of the mode
8385	 minus 1.  For ASHIFTRT, use the size minus one as the count (this can
8386	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8387	 take the count modulo the size.  For other shifts, the result is
8388	 zero.
8389
8390	 Since these shifts are being produced by the compiler by combining
8391	 multiple operations, each of which are defined, we know what the
8392	 result is supposed to be.  */
8393
8394      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8395	{
8396	  if (code == ASHIFTRT)
8397	    count = GET_MODE_BITSIZE (shift_mode) - 1;
8398	  else if (code == ROTATE || code == ROTATERT)
8399	    count %= GET_MODE_BITSIZE (shift_mode);
8400	  else
8401	    {
8402	      /* We can't simply return zero because there may be an
8403		 outer op.  */
8404	      varop = const0_rtx;
8405	      count = 0;
8406	      break;
8407	    }
8408	}
8409
8410      /* Negative counts are invalid and should not have been made (a
8411	 programmer-specified negative count should have been handled
8412	 above).  */
8413      else if (count < 0)
8414	abort ();
8415
8416      /* An arithmetic right shift of a quantity known to be -1 or 0
8417	 is a no-op.  */
8418      if (code == ASHIFTRT
8419	  && (num_sign_bit_copies (varop, shift_mode)
8420	      == GET_MODE_BITSIZE (shift_mode)))
8421	{
8422	  count = 0;
8423	  break;
8424	}
8425
8426      /* If we are doing an arithmetic right shift and discarding all but
8427	 the sign bit copies, this is equivalent to doing a shift by the
8428	 bitsize minus one.  Convert it into that shift because it will often
8429	 allow other simplifications.  */
8430
8431      if (code == ASHIFTRT
8432	  && (count + num_sign_bit_copies (varop, shift_mode)
8433	      >= GET_MODE_BITSIZE (shift_mode)))
8434	count = GET_MODE_BITSIZE (shift_mode) - 1;
8435
8436      /* We simplify the tests below and elsewhere by converting
8437	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8438	 `make_compound_operation' will convert it to an ASHIFTRT for
8439	 those machines (such as Vax) that don't have an LSHIFTRT.  */
8440      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8441	  && code == ASHIFTRT
8442	  && ((nonzero_bits (varop, shift_mode)
8443	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8444	      == 0))
8445	code = LSHIFTRT;
8446
8447      switch (GET_CODE (varop))
8448	{
8449	case SIGN_EXTEND:
8450	case ZERO_EXTEND:
8451	case SIGN_EXTRACT:
8452	case ZERO_EXTRACT:
8453	  new = expand_compound_operation (varop);
8454	  if (new != varop)
8455	    {
8456	      varop = new;
8457	      continue;
8458	    }
8459	  break;
8460
8461	case MEM:
8462	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8463	     minus the width of a smaller mode, we can do this with a
8464	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
8465	  if ((code == ASHIFTRT || code == LSHIFTRT)
8466	      && ! mode_dependent_address_p (XEXP (varop, 0))
8467	      && ! MEM_VOLATILE_P (varop)
8468	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8469					 MODE_INT, 1)) != BLKmode)
8470	    {
8471	      if (BYTES_BIG_ENDIAN)
8472		new = gen_rtx_MEM (tmode, XEXP (varop, 0));
8473	      else
8474		new = gen_rtx_MEM (tmode,
8475				   plus_constant (XEXP (varop, 0),
8476						  count / BITS_PER_UNIT));
8477	      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8478	      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8479	      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
8480	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8481				       : ZERO_EXTEND, mode, new);
8482	      count = 0;
8483	      continue;
8484	    }
8485	  break;
8486
8487	case USE:
8488	  /* Similar to the case above, except that we can only do this if
8489	     the resulting mode is the same as that of the underlying
8490	     MEM and adjust the address depending on the *bits* endianness
8491	     because of the way that bit-field extract insns are defined.  */
8492	  if ((code == ASHIFTRT || code == LSHIFTRT)
8493	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8494					 MODE_INT, 1)) != BLKmode
8495	      && tmode == GET_MODE (XEXP (varop, 0)))
8496	    {
8497	      if (BITS_BIG_ENDIAN)
8498		new = XEXP (varop, 0);
8499	      else
8500		{
8501		  new = copy_rtx (XEXP (varop, 0));
8502		  SUBST (XEXP (new, 0),
8503			 plus_constant (XEXP (new, 0),
8504					count / BITS_PER_UNIT));
8505		}
8506
8507	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8508				       : ZERO_EXTEND, mode, new);
8509	      count = 0;
8510	      continue;
8511	    }
8512	  break;
8513
8514	case SUBREG:
8515	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
8516	     the same number of words as what we've seen so far.  Then store
8517	     the widest mode in MODE.  */
8518	  if (subreg_lowpart_p (varop)
8519	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8520		  > GET_MODE_SIZE (GET_MODE (varop)))
8521	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8522		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8523		  == mode_words))
8524	    {
8525	      varop = SUBREG_REG (varop);
8526	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8527		mode = GET_MODE (varop);
8528	      continue;
8529	    }
8530	  break;
8531
8532	case MULT:
8533	  /* Some machines use MULT instead of ASHIFT because MULT
8534	     is cheaper.  But it is still better on those machines to
8535	     merge two shifts into one.  */
8536	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8537	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8538	    {
8539	      varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
8540				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8541	      continue;
8542	    }
8543	  break;
8544
8545	case UDIV:
8546	  /* Similar, for when divides are cheaper.  */
8547	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8548	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8549	    {
8550	      varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
8551				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8552	      continue;
8553	    }
8554	  break;
8555
8556	case ASHIFTRT:
8557	  /* If we are extracting just the sign bit of an arithmetic right
8558	     shift, that shift is not needed.  */
8559	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8560	    {
8561	      varop = XEXP (varop, 0);
8562	      continue;
8563	    }
8564
8565	  /* ... fall through ...  */
8566
8567	case LSHIFTRT:
8568	case ASHIFT:
8569	case ROTATE:
8570	  /* Here we have two nested shifts.  The result is usually the
8571	     AND of a new shift with a mask.  We compute the result below.  */
8572	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8573	      && INTVAL (XEXP (varop, 1)) >= 0
8574	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8575	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8576	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8577	    {
8578	      enum rtx_code first_code = GET_CODE (varop);
8579	      int first_count = INTVAL (XEXP (varop, 1));
8580	      unsigned HOST_WIDE_INT mask;
8581	      rtx mask_rtx;
8582
8583	      /* We have one common special case.  We can't do any merging if
8584		 the inner code is an ASHIFTRT of a smaller mode.  However, if
8585		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8586		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8587		 we can convert it to
8588		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8589		 This simplifies certain SIGN_EXTEND operations.  */
8590	      if (code == ASHIFT && first_code == ASHIFTRT
8591		  && (GET_MODE_BITSIZE (result_mode)
8592		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8593		{
8594		  /* C3 has the low-order C1 bits zero.  */
8595
8596		  mask = (GET_MODE_MASK (mode)
8597			  & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
8598
8599		  varop = simplify_and_const_int (NULL_RTX, result_mode,
8600						  XEXP (varop, 0), mask);
8601		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8602						varop, count);
8603		  count = first_count;
8604		  code = ASHIFTRT;
8605		  continue;
8606		}
8607
8608	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8609		 than C1 high-order bits equal to the sign bit, we can convert
8610		 this to either an ASHIFT or an ASHIFTRT depending on the
8611		 two counts.
8612
8613		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
8614
8615	      if (code == ASHIFTRT && first_code == ASHIFT
8616		  && GET_MODE (varop) == shift_mode
8617		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8618		      > first_count))
8619		{
8620		  count -= first_count;
8621		  if (count < 0)
8622		    count = - count, code = ASHIFT;
8623		  varop = XEXP (varop, 0);
8624		  continue;
8625		}
8626
8627	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
8628		 we can only do this if FIRST_CODE is also ASHIFTRT.
8629
8630		 We can't do the case when CODE is ROTATE and FIRST_CODE is
8631		 ASHIFTRT.
8632
8633		 If the mode of this shift is not the mode of the outer shift,
8634		 we can't do this if either shift is a right shift or ROTATE.
8635
8636		 Finally, we can't do any of these if the mode is too wide
8637		 unless the codes are the same.
8638
8639		 Handle the case where the shift codes are the same
8640		 first.  */
8641
8642	      if (code == first_code)
8643		{
8644		  if (GET_MODE (varop) != result_mode
8645		      && (code == ASHIFTRT || code == LSHIFTRT
8646			  || code == ROTATE))
8647		    break;
8648
8649		  count += first_count;
8650		  varop = XEXP (varop, 0);
8651		  continue;
8652		}
8653
8654	      if (code == ASHIFTRT
8655		  || (code == ROTATE && first_code == ASHIFTRT)
8656		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8657		  || (GET_MODE (varop) != result_mode
8658		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
8659			  || first_code == ROTATE
8660			  || code == ROTATE)))
8661		break;
8662
8663	      /* To compute the mask to apply after the shift, shift the
8664		 nonzero bits of the inner shift the same way the
8665		 outer shift will.  */
8666
8667	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8668
8669	      mask_rtx
8670		= simplify_binary_operation (code, result_mode, mask_rtx,
8671					     GEN_INT (count));
8672
8673	      /* Give up if we can't compute an outer operation to use.  */
8674	      if (mask_rtx == 0
8675		  || GET_CODE (mask_rtx) != CONST_INT
8676		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
8677					INTVAL (mask_rtx),
8678					result_mode, &complement_p))
8679		break;
8680
8681	      /* If the shifts are in the same direction, we add the
8682		 counts.  Otherwise, we subtract them.  */
8683	      if ((code == ASHIFTRT || code == LSHIFTRT)
8684		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8685		count += first_count;
8686	      else
8687		count -= first_count;
8688
8689	      /* If COUNT is positive, the new shift is usually CODE,
8690		 except in the two cases below, where it is
8691		 FIRST_CODE.  If the count is negative, FIRST_CODE should
8692		 always be used.  */
8693	      if (count > 0
8694		  && ((first_code == ROTATE && code == ASHIFT)
8695		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
8696		code = first_code;
8697	      else if (count < 0)
8698		code = first_code, count = - count;
8699
8700	      varop = XEXP (varop, 0);
8701	      continue;
8702	    }
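
	  /* Worked example (editor's note): for (lshiftrt:SI
	     (lshiftrt:SI X 2) 3), the bits surviving the inner shift
	     are 0x3fffffff; shifting that mask right 3 more places
	     gives 0x07ffffff, so the pair becomes (lshiftrt:SI X 5)
	     with an outer (and 0x07ffffff), which here happens to be
	     redundant but in general is not.  */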
8703
8704	  /* If we have (A << B << C) for any shift, we can convert this to
8705	     (A << C << B).  This wins if A is a constant.  Only try this if
8706	     B is not a constant.  */
8707
8708	  else if (GET_CODE (varop) == code
8709		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
8710		   && 0 != (new
8711			    = simplify_binary_operation (code, mode,
8712							 XEXP (varop, 0),
8713							 GEN_INT (count))))
8714	    {
8715	      varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8716	      count = 0;
8717	      continue;
8718	    }
8719	  break;
8720
8721	case NOT:
8722	  /* Make this fit the case below.  */
8723	  varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8724				   GEN_INT (GET_MODE_MASK (mode)));
8725	  continue;
8726
8727	case IOR:
8728	case AND:
8729	case XOR:
8730	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8731	     with C the size of VAROP - 1 and the shift is logical if
8732	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8733	     we have an (le X 0) operation.  If we have an arithmetic shift
8734	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
8735	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
8736
8737	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8738	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8739	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8740	      && (code == LSHIFTRT || code == ASHIFTRT)
8741	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8742	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8743	    {
8744	      count = 0;
8745	      varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8746				       const0_rtx);
8747
8748	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8749		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8750
8751	      continue;
8752	    }
8753
8754	  /* If we have (shift (logical)), move the logical to the outside
8755	     to allow it to possibly combine with another logical and the
8756	     shift to combine with another shift.  This also canonicalizes to
8757	     what a ZERO_EXTRACT looks like.  Also, some machines have
8758	     (and (shift)) insns.  */
8759
8760	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8761	      && (new = simplify_binary_operation (code, result_mode,
8762						   XEXP (varop, 1),
8763						   GEN_INT (count))) != 0
8764	      && GET_CODE (new) == CONST_INT
8765	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8766				  INTVAL (new), result_mode, &complement_p))
8767	    {
8768	      varop = XEXP (varop, 0);
8769	      continue;
8770	    }
8771
8772	  /* If we can't do that, try to simplify the shift in each arm of the
8773	     logical expression, make a new logical expression, and apply
8774	     the inverse distributive law.  */
8775	  {
8776	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8777					    XEXP (varop, 0), count);
8778	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8779					    XEXP (varop, 1), count);
8780
8781	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8782	    varop = apply_distributive_law (varop);
8783
8784	    count = 0;
8785	  }
8786	  break;
8787
8788	case EQ:
8789	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8790	     says that the sign bit can be tested, FOO has mode MODE, C is
8791	     GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
8792	     may be nonzero.  */
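	  /* For example, on a machine where STORE_FLAG_VALUE is -1, FOO == 0
	     makes (eq FOO 0) evaluate to -1 and the shift then yields 1,
	     while FOO == 1 yields 0; that is exactly (xor FOO 1).  */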
8793	  if (code == LSHIFTRT
8794	      && XEXP (varop, 1) == const0_rtx
8795	      && GET_MODE (XEXP (varop, 0)) == result_mode
8796	      && count == GET_MODE_BITSIZE (result_mode) - 1
8797	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8798	      && ((STORE_FLAG_VALUE
8799		   & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8800	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8801	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8802				  (HOST_WIDE_INT) 1, result_mode,
8803				  &complement_p))
8804	    {
8805	      varop = XEXP (varop, 0);
8806	      count = 0;
8807	      continue;
8808	    }
8809	  break;
8810
8811	case NEG:
8812	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8813	     than the number of bits in the mode is equivalent to A.  */
8814	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8815	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8816	    {
8817	      varop = XEXP (varop, 0);
8818	      count = 0;
8819	      continue;
8820	    }
8821
8822	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
8823	     NEG outside to allow shifts to combine.  */
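	  /* For example, (ashift (neg X) 3) becomes (neg (ashift X 3)),
	     since (-X) * 8 == -(X * 8).  */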
8824	  if (code == ASHIFT
8825	      && merge_outer_ops (&outer_op, &outer_const, NEG,
8826				  (HOST_WIDE_INT) 0, result_mode,
8827				  &complement_p))
8828	    {
8829	      varop = XEXP (varop, 0);
8830	      continue;
8831	    }
8832	  break;
8833
8834	case PLUS:
8835	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8836	     is one less than the number of bits in the mode is
8837	     equivalent to (xor A 1).  */
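	  /* For example, A == 1 gives (plus 1 -1) == 0, which shifts to 0,
	     while A == 0 gives -1, whose sign bit shifts down to 1.  */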
8838	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8839	      && XEXP (varop, 1) == constm1_rtx
8840	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8841	      && merge_outer_ops (&outer_op, &outer_const, XOR,
8842				  (HOST_WIDE_INT) 1, result_mode,
8843				  &complement_p))
8844	    {
8845	      count = 0;
8846	      varop = XEXP (varop, 0);
8847	      continue;
8848	    }
8849
8850	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8851	     that might be nonzero in BAR are those being shifted out and those
8852	     bits are known zero in FOO, we can replace the PLUS with FOO.
8853	     Similarly in the other operand order.  This code occurs when
8854	     we are computing the size of a variable-size array.  */
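	  /* For example, if FOO is a multiple of 8 and BAR is known to lie
	     in [0, 7], then (lshiftrt (plus FOO BAR) 3) equals
	     (lshiftrt FOO 3), since the addition cannot carry into the
	     retained bits.  */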
8855
8856	  if ((code == ASHIFTRT || code == LSHIFTRT)
8857	      && count < HOST_BITS_PER_WIDE_INT
8858	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8859	      && (nonzero_bits (XEXP (varop, 1), result_mode)
8860		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8861	    {
8862	      varop = XEXP (varop, 0);
8863	      continue;
8864	    }
8865	  else if ((code == ASHIFTRT || code == LSHIFTRT)
8866		   && count < HOST_BITS_PER_WIDE_INT
8867		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8868		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8869			    >> count)
8870		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8871			    & nonzero_bits (XEXP (varop, 1),
8872						 result_mode)))
8873	    {
8874	      varop = XEXP (varop, 1);
8875	      continue;
8876	    }
8877
8878	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
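	  /* For example, (ashift (plus X 3) 2) becomes
	     (plus (ashift X 2) 12), since C' == C << N.  */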
8879	  if (code == ASHIFT
8880	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
8881	      && (new = simplify_binary_operation (ASHIFT, result_mode,
8882						   XEXP (varop, 1),
8883						   GEN_INT (count))) != 0
8884	      && GET_CODE (new) == CONST_INT
8885	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
8886				  INTVAL (new), result_mode, &complement_p))
8887	    {
8888	      varop = XEXP (varop, 0);
8889	      continue;
8890	    }
8891	  break;
8892
8893	case MINUS:
8894	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8895	     with C the size of VAROP - 1 and the shift is logical if
8896	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8897	     we have a (gt X 0) operation.  If the shift is arithmetic with
8898	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8899	     we have a (neg (gt X 0)) operation.  */
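	  /* For example, X == 3 makes (ashiftrt X C) zero and the MINUS
	     evaluate to -3, whose sign bit is set, while X == -3 gives
	     (minus -1 -3) == 2, whose sign bit is clear.  */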
8900
8901	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8902	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8903	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8904	      && (code == LSHIFTRT || code == ASHIFTRT)
8905	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8906	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8907	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8908	    {
8909	      count = 0;
8910	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8911				       const0_rtx);
8912
8913	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8914		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8915
8916	      continue;
8917	    }
8918	  break;
8919
8920	case TRUNCATE:
8921	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
8922	     if the truncate does not affect the value.  */
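	  /* For example, (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 3)
	     becomes (truncate:SI (lshiftrt:DI X 35)), since both forms
	     leave X's bits 35 and up in the low-order part.  */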
8923	  if (code == LSHIFTRT
8924	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
8925	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8926	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
8927		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
8928		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
8929	    {
8930	      rtx varop_inner = XEXP (varop, 0);
8931
8932	      varop_inner = gen_rtx_combine (LSHIFTRT,
8933					     GET_MODE (varop_inner),
8934					     XEXP (varop_inner, 0),
8935					     GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
8936	      varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
8937				       varop_inner);
8938	      count = 0;
8939	      continue;
8940	    }
8941	  break;
8942
8943	default:
8944	  break;
8945	}
8946
8947      break;
8948    }
8949
8950  /* We need to determine what mode to do the shift in.  If the shift is
8951     a right shift or ROTATE, we must always do it in the mode it was
8952     originally done in.  Otherwise, we can do it in MODE, the widest mode
8953     encountered.  The code we care about is that of the shift that will
8954     actually be done, not the shift that was originally requested.  */
8955  shift_mode
8956    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8957       ? result_mode : mode);
8958
8959  /* We have now finished analyzing the shift.  The result should be
8960     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
8961     OUTER_OP is non-NIL, it is an operation that needs to be applied
8962     to the result of the shift.  OUTER_CONST is the relevant constant,
8963     but we must turn off all bits turned off in the shift.
8964
8965     If we were passed a value for X, see if we can use any pieces of
8966     it.  If not, make new rtx.  */
8967
8968  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8969      && GET_CODE (XEXP (x, 1)) == CONST_INT
8970      && INTVAL (XEXP (x, 1)) == count)
8971    const_rtx = XEXP (x, 1);
8972  else
8973    const_rtx = GEN_INT (count);
8974
8975  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8976      && GET_MODE (XEXP (x, 0)) == shift_mode
8977      && SUBREG_REG (XEXP (x, 0)) == varop)
8978    varop = XEXP (x, 0);
8979  else if (GET_MODE (varop) != shift_mode)
8980    varop = gen_lowpart_for_combine (shift_mode, varop);
8981
8982  /* If we can't make the SUBREG, try to return what we were given.  */
8983  if (GET_CODE (varop) == CLOBBER)
8984    return x ? x : varop;
8985
8986  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8987  if (new != 0)
8988    x = new;
8989  else
8990    {
8991      if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8992	x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8993
8994      SUBST (XEXP (x, 0), varop);
8995      SUBST (XEXP (x, 1), const_rtx);
8996    }
8997
8998  /* If we have an outer operation and we just made a shift, it is
8999     possible that we could have simplified the shift were it not
9000     for the outer operation.  So try to do the simplification
9001     recursively.  */
9002
9003  if (outer_op != NIL && GET_CODE (x) == code
9004      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9005    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9006			      INTVAL (XEXP (x, 1)));
9007
9008  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9009     turn off all the bits that the shift would have turned off.  */
9010  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9011    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9012				GET_MODE_MASK (result_mode) >> orig_count);
9013
9014  /* Do the remainder of the processing in RESULT_MODE.  */
9015  x = gen_lowpart_for_combine (result_mode, x);
9016
9017  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9018     operation.  */
9019  if (complement_p)
9020    x = gen_unary (NOT, result_mode, result_mode, x);
9021
9022  if (outer_op != NIL)
9023    {
9024      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9025	{
9026	  int width = GET_MODE_BITSIZE (result_mode);
9027
9028	  outer_const &= GET_MODE_MASK (result_mode);
9029
9030	  /* If this would be an entire word for the target, but is not for
9031	     the host, then sign-extend on the host so that the number will
9032	     look the same way on the host that it would on the target.
9033
9034	     For example, when building a 64 bit alpha hosted 32 bit sparc
9035	     targeted compiler, then we want the 32 bit unsigned value -1 to be
9036	     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
9037	     The latter confuses the sparc backend.  */
9038
9039	  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
9040	      && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
9041	    outer_const |= ((HOST_WIDE_INT) (-1) << width);
9042	}
9043
9044      if (outer_op == AND)
9045	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9046      else if (outer_op == SET)
9047	/* This means that we have determined that the result is
9048	   equivalent to a constant.  This should be rare.  */
9049	x = GEN_INT (outer_const);
9050      else if (GET_RTX_CLASS (outer_op) == '1')
9051	x = gen_unary (outer_op, result_mode, result_mode, x);
9052      else
9053	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9054    }
9055
9056  return x;
9057}
9058
9059/* Like recog, but we receive the address of a pointer to a new pattern.
9060   We try to match the rtx that the pointer points to.
9061   If that fails, we may try to modify or replace the pattern,
9062   storing the replacement into the same pointer object.
9063
9064   Modifications include deletion or addition of CLOBBERs.
9065
9066   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9067   the CLOBBERs are placed.
9068
9069   PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
9070   we had to add.
9071
9072   The value is the final insn code from the pattern ultimately matched,
9073   or -1.  */
9074
9075static int
9076recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
9077     rtx *pnewpat;
9078     rtx insn;
9079     rtx *pnotes;
9080     int *padded_scratches;
9081{
9082  register rtx pat = *pnewpat;
9083  int insn_code_number;
9084  int num_clobbers_to_add = 0;
9085  int i;
9086  rtx notes = 0;
9087
9088  *padded_scratches = 0;
9089
9090  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9091     we use to indicate that something didn't match.  If we find such a
9092     thing, force rejection.  */
9093  if (GET_CODE (pat) == PARALLEL)
9094    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9095      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9096	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9097	return -1;
9098
9099  /* Is the result of combination a valid instruction?  */
9100  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9101
9102  /* If it isn't, there is the possibility that we previously had an insn
9103     that clobbered some register as a side effect, but the combined
9104     insn doesn't need to do that.  So try once more without the clobbers
9105     unless this represents an ASM insn.  */
9106
9107  if (insn_code_number < 0 && ! check_asm_operands (pat)
9108      && GET_CODE (pat) == PARALLEL)
9109    {
9110      int pos;
9111
9112      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9113	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9114	  {
9115	    if (i != pos)
9116	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9117	    pos++;
9118	  }
9119
9120      SUBST_INT (XVECLEN (pat, 0), pos);
9121
9122      if (pos == 1)
9123	pat = XVECEXP (pat, 0, 0);
9124
9125      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9126    }
9127
9128  /* If we had any clobbers to add, make a new pattern that contains
9129     them.  Then check to make sure that all of them are dead.  */
9130  if (num_clobbers_to_add)
9131    {
9132      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9133				     gen_rtvec (GET_CODE (pat) == PARALLEL
9134						? XVECLEN (pat, 0) + num_clobbers_to_add
9135						: num_clobbers_to_add + 1));
9136
9137      if (GET_CODE (pat) == PARALLEL)
9138	for (i = 0; i < XVECLEN (pat, 0); i++)
9139	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9140      else
9141	XVECEXP (newpat, 0, 0) = pat;
9142
9143      add_clobbers (newpat, insn_code_number);
9144
9145      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9146	   i < XVECLEN (newpat, 0); i++)
9147	{
9148	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9149	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9150	    return -1;
9151	  else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
9152	    (*padded_scratches)++;
9153	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9154				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9155	}
9156      pat = newpat;
9157    }
9158
9159  *pnewpat = pat;
9160  *pnotes = notes;
9161
9162  return insn_code_number;
9163}
9164
9165/* Like gen_lowpart but for use by combine.  In combine it is not possible
9166   to create any new pseudoregs.  However, it is safe to create
9167   invalid memory addresses, because combine will try to recognize
9168   them and all they will do is make the combine attempt fail.
9169
9170   If for some reason this cannot do its job, an rtx
9171   (clobber (const_int 0)) is returned.
9172   An insn containing that will not be recognized.  */
9173
9174#undef gen_lowpart
9175
9176static rtx
9177gen_lowpart_for_combine (mode, x)
9178     enum machine_mode mode;
9179     register rtx x;
9180{
9181  rtx result;
9182
9183  if (GET_MODE (x) == mode)
9184    return x;
9185
9186  /* We can only support MODE being wider than a word if X is a
9187     constant integer or has a mode the same size.  */
9188
9189  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9190      && ! ((GET_MODE (x) == VOIDmode
9191	     && (GET_CODE (x) == CONST_INT
9192		 || GET_CODE (x) == CONST_DOUBLE))
9193	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9194    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9195
9196  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
9197     won't know what to do.  So we will strip off the SUBREG here and
9198     process normally.  */
9199  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9200    {
9201      x = SUBREG_REG (x);
9202      if (GET_MODE (x) == mode)
9203	return x;
9204    }
9205
9206  result = gen_lowpart_common (mode, x);
9207  if (result != 0
9208      && GET_CODE (result) == SUBREG
9209      && GET_CODE (SUBREG_REG (result)) == REG
9210      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9211      && (GET_MODE_SIZE (GET_MODE (result))
9212	  != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
9213    REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
9214
9215  if (result)
9216    return result;
9217
9218  if (GET_CODE (x) == MEM)
9219    {
9220      register int offset = 0;
9221      rtx new;
9222
9223      /* Refuse to work on a volatile memory ref or one with a mode-dependent
9224	 address.  */
9225      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9226	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9227
9228      /* If we want to refer to something bigger than the original memref,
9229	 generate a perverse subreg instead.  That will force a reload
9230	 of the original memref X.  */
9231      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9232	return gen_rtx_SUBREG (mode, x, 0);
9233
9234      if (WORDS_BIG_ENDIAN)
9235	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9236		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9237      if (BYTES_BIG_ENDIAN)
9238	{
9239	  /* Adjust the address so that the address-after-the-data is
9240	     unchanged.  */
9241	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9242		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9243	}
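      /* For example, the QImode lowpart of a 4-byte SImode MEM on a
	 big-endian target ends up at byte offset 3, so that the address
	 just past the data is unchanged.  */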
9244      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
9245      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
9246      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
9247      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
9248      return new;
9249    }
9250
9251  /* If X is a comparison operator, rewrite it in a new mode.  This
9252     probably won't match, but may allow further simplifications.  */
9253  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9254    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9255
9256  /* If we couldn't simplify X any other way, just enclose it in a
9257     SUBREG.  Normally, this SUBREG won't match, but some patterns may
9258     include an explicit SUBREG or we may simplify it further in combine.  */
9259  else
9260    {
9261      int word = 0;
9262
9263      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9264	word = ((GET_MODE_SIZE (GET_MODE (x))
9265		 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9266		/ UNITS_PER_WORD);
9267      return gen_rtx_SUBREG (mode, x, word);
9268    }
9269}
9270
9271/* Make an rtx expression.  This is a subset of gen_rtx and only supports
9272   expressions of 1, 2, or 3 operands, each of which is an rtx expression.
9273
9274   If the identical expression was previously in the insn (in the undobuf),
9275   it will be returned.  Only if it is not found will a new expression
9276   be made.  */
9277
9278/*VARARGS2*/
9279static rtx
9280gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
9281{
9282#ifndef __STDC__
9283  enum rtx_code code;
9284  enum machine_mode mode;
9285#endif
9286  va_list p;
9287  int n_args;
9288  rtx args[3];
9289  int j;
9290  char *fmt;
9291  rtx rt;
9292  struct undo *undo;
9293
9294  VA_START (p, mode);
9295
9296#ifndef __STDC__
9297  code = va_arg (p, enum rtx_code);
9298  mode = va_arg (p, enum machine_mode);
9299#endif
9300
9301  n_args = GET_RTX_LENGTH (code);
9302  fmt = GET_RTX_FORMAT (code);
9303
9304  if (n_args == 0 || n_args > 3)
9305    abort ();
9306
9307  /* Get each arg and verify that it is supposed to be an expression.  */
9308  for (j = 0; j < n_args; j++)
9309    {
9310      if (*fmt++ != 'e')
9311	abort ();
9312
9313      args[j] = va_arg (p, rtx);
9314    }
9315
9316  /* See if this is in undobuf.  Be sure we don't use objects that came
9317     from another insn; this could produce circular rtl structures.  */
9318
9319  for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9320    if (!undo->is_int
9321	&& GET_CODE (undo->old_contents.r) == code
9322	&& GET_MODE (undo->old_contents.r) == mode)
9323      {
9324	for (j = 0; j < n_args; j++)
9325	  if (XEXP (undo->old_contents.r, j) != args[j])
9326	    break;
9327
9328	if (j == n_args)
9329	  return undo->old_contents.r;
9330      }
9331
9332  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
9333     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
9334  rt = rtx_alloc (code);
9335  PUT_MODE (rt, mode);
9336  XEXP (rt, 0) = args[0];
9337  if (n_args > 1)
9338    {
9339      XEXP (rt, 1) = args[1];
9340      if (n_args > 2)
9341	XEXP (rt, 2) = args[2];
9342    }
9343  return rt;
9344}
9345
9346/* These routines make binary and unary operations by first seeing if they
9347   fold; if not, a new expression is allocated.  */
9348
9349static rtx
9350gen_binary (code, mode, op0, op1)
9351     enum rtx_code code;
9352     enum machine_mode mode;
9353     rtx op0, op1;
9354{
9355  rtx result;
9356  rtx tem;
9357
9358  if (GET_RTX_CLASS (code) == 'c'
9359      && (GET_CODE (op0) == CONST_INT
9360	  || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9361    tem = op0, op0 = op1, op1 = tem;
9362
9363  if (GET_RTX_CLASS (code) == '<')
9364    {
9365      enum machine_mode op_mode = GET_MODE (op0);
9366
9367      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9368	 just (REL_OP X Y).  */
9369      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9370	{
9371	  op1 = XEXP (op0, 1);
9372	  op0 = XEXP (op0, 0);
9373	  op_mode = GET_MODE (op0);
9374	}
9375
9376      if (op_mode == VOIDmode)
9377	op_mode = GET_MODE (op1);
9378      result = simplify_relational_operation (code, op_mode, op0, op1);
9379    }
9380  else
9381    result = simplify_binary_operation (code, mode, op0, op1);
9382
9383  if (result)
9384    return result;
9385
9386  /* Put complex operands first and constants second.  */
9387  if (GET_RTX_CLASS (code) == 'c'
9388      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9389	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9390	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9391	  || (GET_CODE (op0) == SUBREG
9392	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9393	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9394    return gen_rtx_combine (code, mode, op1, op0);
9395
9396  /* If we are turning off bits already known off in OP0, we need not do
9397     an AND.  */
9398  else if (code == AND && GET_CODE (op1) == CONST_INT
9399	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9400	   && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9401    return op0;
9402
9403  return gen_rtx_combine (code, mode, op0, op1);
9404}
9405
9406static rtx
9407gen_unary (code, mode, op0_mode, op0)
9408     enum rtx_code code;
9409     enum machine_mode mode, op0_mode;
9410     rtx op0;
9411{
9412  rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
9413
9414  if (result)
9415    return result;
9416
9417  return gen_rtx_combine (code, mode, op0);
9418}
9419
9420/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9421   comparison code that will be tested.
9422
9423   The result is a possibly different comparison code to use.  *POP0 and
9424   *POP1 may be updated.
9425
9426   It is possible that we might detect that a comparison is either always
9427   true or always false.  However, we do not perform general constant
9428   folding in combine, so this knowledge isn't useful.  Such tautologies
9429   should have been detected earlier.  Hence we ignore all such cases.  */
9430
9431static enum rtx_code
9432simplify_comparison (code, pop0, pop1)
9433     enum rtx_code code;
9434     rtx *pop0;
9435     rtx *pop1;
9436{
9437  rtx op0 = *pop0;
9438  rtx op1 = *pop1;
9439  rtx tem, tem1;
9440  int i;
9441  enum machine_mode mode, tmode;
9442
9443  /* Try a few ways of applying the same transformation to both operands.  */
9444  while (1)
9445    {
9446#ifndef WORD_REGISTER_OPERATIONS
9447      /* The test below this one won't handle SIGN_EXTENDs on these machines,
9448	 so check specially.  */
9449      if (code != GTU && code != GEU && code != LTU && code != LEU
9450	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9451	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
9452	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
9453	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9454	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9455	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9456	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9457	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9458	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
9459	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9460	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9461	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9462	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9463	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9464	  && (INTVAL (XEXP (op0, 1))
9465	      == (GET_MODE_BITSIZE (GET_MODE (op0))
9466		  - (GET_MODE_BITSIZE
9467		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9468	{
9469	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9470	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9471	}
9472#endif
9473
9474      /* If both operands are the same constant shift, see if we can ignore the
9475	 shift.  We can if the shift is a rotate or if the bits shifted out of
9476	 this shift are known to be zero for both inputs and if the type of
9477	 comparison is compatible with the shift.  */
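      /* For example, (eq (lshiftrt A 2) (lshiftrt B 2)) can become
	 (eq A B) when the low two bits of both A and B are known to be
	 zero.  */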
9478      if (GET_CODE (op0) == GET_CODE (op1)
9479	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9480	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9481	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9482		  && (code != GT && code != LT && code != GE && code != LE))
9483	      || (GET_CODE (op0) == ASHIFTRT
9484		  && (code != GTU && code != LTU
9485		      && code != GEU && code != LEU)))
9486	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9487	  && INTVAL (XEXP (op0, 1)) >= 0
9488	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9489	  && XEXP (op0, 1) == XEXP (op1, 1))
9490	{
9491	  enum machine_mode mode = GET_MODE (op0);
9492	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9493	  int shift_count = INTVAL (XEXP (op0, 1));
9494
9495	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9496	    mask &= (mask >> shift_count) << shift_count;
9497	  else if (GET_CODE (op0) == ASHIFT)
9498	    mask = (mask & (mask << shift_count)) >> shift_count;
9499
9500	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9501	      && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
9502	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9503	  else
9504	    break;
9505	}
9506
9507      /* If both operands are AND's of a paradoxical SUBREG by constant, the
9508	 SUBREGs are of the same mode, and, in both cases, the AND would
9509	 be redundant if the comparison was done in the narrower mode,
9510	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9511	 and the operand's possibly nonzero bits are 0xffffff01; in that case
9512	 if we only care about QImode, we don't need the AND).  This case
9513	 occurs if the output mode of an scc insn is not SImode and
9514	 STORE_FLAG_VALUE == 1 (e.g., the 386).
9515
9516	 Similarly, check for a case where the AND's are ZERO_EXTEND
9517	 operations from some narrower mode even though a SUBREG is not
9518	 present.  */
9519
9520      else if  (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9521		&& GET_CODE (XEXP (op0, 1)) == CONST_INT
9522		&& GET_CODE (XEXP (op1, 1)) == CONST_INT)
9523	{
9524	  rtx inner_op0 = XEXP (op0, 0);
9525	  rtx inner_op1 = XEXP (op1, 0);
9526	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9527	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9528	  int changed = 0;
9529
9530	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9531	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
9532		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9533	      && (GET_MODE (SUBREG_REG (inner_op0))
9534		  == GET_MODE (SUBREG_REG (inner_op1)))
9535	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
9536		  <= HOST_BITS_PER_WIDE_INT)
9537	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9538					     GET_MODE (SUBREG_REG (inner_op0)))))
9539	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9540					     GET_MODE (SUBREG_REG (inner_op1))))))
9541	    {
9542	      op0 = SUBREG_REG (inner_op0);
9543	      op1 = SUBREG_REG (inner_op1);
9544
9545	      /* The resulting comparison is always unsigned since we masked
9546		 off the original sign bit.  */
9547	      code = unsigned_condition (code);
9548
9549	      changed = 1;
9550	    }
9551
9552	  else if (c0 == c1)
9553	    for (tmode = GET_CLASS_NARROWEST_MODE
9554		 (GET_MODE_CLASS (GET_MODE (op0)));
9555		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9556	      if (c0 == GET_MODE_MASK (tmode))
9557		{
9558		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
9559		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
9560		  code = unsigned_condition (code);
9561		  changed = 1;
9562		  break;
9563		}
9564
9565	  if (! changed)
9566	    break;
9567	}
9568
9569      /* If both operands are NOT, we can strip off the outer operation
9570	 and adjust the comparison code for swapped operands; similarly for
9571	 NEG, except that this must be an equality comparison.  */
9572      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9573	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9574		   && (code == EQ || code == NE)))
9575	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9576
9577      else
9578	break;
9579    }
9580
9581  /* If the first operand is a constant, swap the operands and adjust the
9582     comparison code appropriately, but don't do this if the second operand
9583     is already a constant integer.  */
9584  if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9585    {
9586      tem = op0, op0 = op1, op1 = tem;
9587      code = swap_condition (code);
9588    }
9589
9590  /* We now enter a loop during which we will try to simplify the comparison.
9591     For the most part, we are only concerned with comparisons with zero,
9592     but some expressions are really comparisons with zero even though
9593     they do not start out looking that way.  */
9594
9595  while (GET_CODE (op1) == CONST_INT)
9596    {
9597      enum machine_mode mode = GET_MODE (op0);
9598      int mode_width = GET_MODE_BITSIZE (mode);
9599      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9600      int equality_comparison_p;
9601      int sign_bit_comparison_p;
9602      int unsigned_comparison_p;
9603      HOST_WIDE_INT const_op;
9604
9605      /* We only want to handle integral modes.  This catches VOIDmode,
9606	 CCmode, and the floating-point modes.  An exception is that we
9607	 can handle VOIDmode if OP0 is a COMPARE or a comparison
9608	 operation.  */
9609
9610      if (GET_MODE_CLASS (mode) != MODE_INT
9611	  && ! (mode == VOIDmode
9612		&& (GET_CODE (op0) == COMPARE
9613		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9614	break;
9615
9616      /* Get the constant we are comparing against and turn off all bits
9617	 not on in our mode.  */
9618      const_op = INTVAL (op1);
9619      if (mode_width <= HOST_BITS_PER_WIDE_INT)
9620	const_op &= mask;
9621
9622      /* If we are comparing against a constant power of two and the value
9623	 being compared can only have that single bit nonzero (e.g., it was
9624	 `and'ed with that bit), we can replace this with a comparison
9625	 with zero.  */
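      /* For example, (eq (and X 8) 8) becomes (ne (and X 8) 0), since
	 bit 3 is the only bit of the AND that can be nonzero.  */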
9626      if (const_op
9627	  && (code == EQ || code == NE || code == GE || code == GEU
9628	      || code == LT || code == LTU)
9629	  && mode_width <= HOST_BITS_PER_WIDE_INT
9630	  && exact_log2 (const_op) >= 0
9631	  && nonzero_bits (op0, mode) == const_op)
9632	{
9633	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9634	  op1 = const0_rtx, const_op = 0;
9635	}
9636
9637      /* Similarly, if we are comparing a value known to be either -1 or
9638	 0 with -1, change it to the opposite comparison against zero.  */
9639
9640      if (const_op == -1
9641	  && (code == EQ || code == NE || code == GT || code == LE
9642	      || code == GEU || code == LTU)
9643	  && num_sign_bit_copies (op0, mode) == mode_width)
9644	{
9645	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9646	  op1 = const0_rtx, const_op = 0;
9647	}
9648
9649      /* Do some canonicalizations based on the comparison code.  We prefer
9650	 comparisons against zero and then prefer equality comparisons.
9651	 If we can reduce the size of a constant, we will do that too.  */
9652
9653      switch (code)
9654	{
9655	case LT:
9656	  /* < C is equivalent to <= (C - 1) */
9657	  if (const_op > 0)
9658	    {
9659	      const_op -= 1;
9660	      op1 = GEN_INT (const_op);
9661	      code = LE;
9662	      /* ... fall through to LE case below.  */
9663	    }
9664	  else
9665	    break;
9666
9667	case LE:
9668	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
9669	  if (const_op < 0)
9670	    {
9671	      const_op += 1;
9672	      op1 = GEN_INT (const_op);
9673	      code = LT;
9674	    }
9675
9676	  /* If we are doing a <= 0 comparison on a value known to have
9677	     a zero sign bit, we can replace this with == 0.  */
9678	  else if (const_op == 0
9679		   && mode_width <= HOST_BITS_PER_WIDE_INT
9680		   && (nonzero_bits (op0, mode)
9681		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9682	    code = EQ;
9683	  break;
9684
9685	case GE:
9686	  /* >= C is equivalent to > (C - 1).  */
9687	  if (const_op > 0)
9688	    {
9689	      const_op -= 1;
9690	      op1 = GEN_INT (const_op);
9691	      code = GT;
9692	      /* ... fall through to GT below.  */
9693	    }
9694	  else
9695	    break;
9696
9697	case GT:
9698	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
9699	  if (const_op < 0)
9700	    {
9701	      const_op += 1;
9702	      op1 = GEN_INT (const_op);
9703	      code = GE;
9704	    }
9705
9706	  /* If we are doing a > 0 comparison on a value known to have
9707	     a zero sign bit, we can replace this with != 0.  */
9708	  else if (const_op == 0
9709		   && mode_width <= HOST_BITS_PER_WIDE_INT
9710		   && (nonzero_bits (op0, mode)
9711		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9712	    code = NE;
9713	  break;
9714
9715	case LTU:
9716	  /* < C is equivalent to <= (C - 1).  */
9717	  if (const_op > 0)
9718	    {
9719	      const_op -= 1;
9720	      op1 = GEN_INT (const_op);
9721	      code = LEU;
9722	      /* ... fall through ...  */
9723	    }
9724
9725	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
9726	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9727		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9728	    {
9729	      const_op = 0, op1 = const0_rtx;
9730	      code = GE;
9731	      break;
9732	    }
9733	  else
9734	    break;
9735
9736	case LEU:
9737	  /* unsigned <= 0 is equivalent to == 0 */
9738	  if (const_op == 0)
9739	    code = EQ;
9740
9741	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
9742	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9743		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9744	    {
9745	      const_op = 0, op1 = const0_rtx;
9746	      code = GE;
9747	    }
9748	  break;
9749
9750	case GEU:
9751	  /* >= C is equivalent to > (C - 1).  */
9752	  if (const_op > 1)
9753	    {
9754	      const_op -= 1;
9755	      op1 = GEN_INT (const_op);
9756	      code = GTU;
9757	      /* ... fall through ...  */
9758	    }
9759
9760	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
9761	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9762		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9763	    {
9764	      const_op = 0, op1 = const0_rtx;
9765	      code = LT;
9766	      break;
9767	    }
9768	  else
9769	    break;
9770
9771	case GTU:
9772	  /* unsigned > 0 is equivalent to != 0 */
9773	  if (const_op == 0)
9774	    code = NE;
9775
9776	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
9777	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9778		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9779	    {
9780	      const_op = 0, op1 = const0_rtx;
9781	      code = LT;
9782	    }
9783	  break;
9784
9785	default:
9786	  break;
9787	}
9788
9789      /* Compute some predicates to simplify code below.  */
9790
9791      equality_comparison_p = (code == EQ || code == NE);
9792      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9793      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9794			       || code == GEU);
9795
9796      /* If this is a sign bit comparison and we can do arithmetic in
9797	 MODE, say that we will only be needing the sign bit of OP0.  */
9798      if (sign_bit_comparison_p
9799	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9800	op0 = force_to_mode (op0, mode,
9801			     ((HOST_WIDE_INT) 1
9802			      << (GET_MODE_BITSIZE (mode) - 1)),
9803			     NULL_RTX, 0);
9804
9805      /* Now try cases based on the opcode of OP0.  If none of the cases
9806	 does a "continue", we exit this loop immediately after the
9807	 switch.  */
9808
9809      switch (GET_CODE (op0))
9810	{
9811	case ZERO_EXTRACT:
9812	  /* If we are extracting a single bit from a variable position in
9813	     a constant that has only a single bit set and are comparing it
9814	     with zero, we can convert this into an equality comparison
9815	     between the position and the location of the single bit.  */
9816
9817	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9818	      && XEXP (op0, 1) == const1_rtx
9819	      && equality_comparison_p && const_op == 0
9820	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
9821	    {
9822	      if (BITS_BIG_ENDIAN)
9823#ifdef HAVE_extzv
9824		i = (GET_MODE_BITSIZE
9825		     (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9826#else
9827	        i = BITS_PER_WORD - 1 - i;
9828#endif
9829
9830	      op0 = XEXP (op0, 2);
9831	      op1 = GEN_INT (i);
9832	      const_op = i;
9833
9834	      /* Result is nonzero iff shift count is equal to I.  */
9835	      code = reverse_condition (code);
9836	      continue;
9837	    }
9838
9839	  /* ... fall through ...  */
9840
9841	case SIGN_EXTRACT:
9842	  tem = expand_compound_operation (op0);
9843	  if (tem != op0)
9844	    {
9845	      op0 = tem;
9846	      continue;
9847	    }
9848	  break;
9849
9850	case NOT:
9851	  /* If testing for equality, we can take the NOT of the constant.  */
9852	  if (equality_comparison_p
9853	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9854	    {
9855	      op0 = XEXP (op0, 0);
9856	      op1 = tem;
9857	      continue;
9858	    }
9859
9860	  /* If just looking at the sign bit, reverse the sense of the
9861	     comparison.  */
9862	  if (sign_bit_comparison_p)
9863	    {
9864	      op0 = XEXP (op0, 0);
9865	      code = (code == GE ? LT : GE);
9866	      continue;
9867	    }
9868	  break;
9869
9870	case NEG:
9871	  /* If testing for equality, we can take the NEG of the constant.  */
9872	  if (equality_comparison_p
9873	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9874	    {
9875	      op0 = XEXP (op0, 0);
9876	      op1 = tem;
9877	      continue;
9878	    }
9879
9880	  /* The remaining cases only apply to comparisons with zero.  */
9881	  if (const_op != 0)
9882	    break;
9883
9884	  /* When X is ABS or is known positive,
9885	     (neg X) is < 0 if and only if X != 0.  */
9886
9887	  if (sign_bit_comparison_p
9888	      && (GET_CODE (XEXP (op0, 0)) == ABS
9889		  || (mode_width <= HOST_BITS_PER_WIDE_INT
9890		      && (nonzero_bits (XEXP (op0, 0), mode)
9891			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9892	    {
9893	      op0 = XEXP (op0, 0);
9894	      code = (code == LT ? NE : EQ);
9895	      continue;
9896	    }
9897
9898	  /* If we have NEG of something whose two high-order bits are the
9899	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
9900	  if (num_sign_bit_copies (op0, mode) >= 2)
9901	    {
9902	      op0 = XEXP (op0, 0);
9903	      code = swap_condition (code);
9904	      continue;
9905	    }
9906	  break;
9907
9908	case ROTATE:
9909	  /* If we are testing equality and our count is a constant, we
9910	     can perform the inverse operation on our RHS.  */
9911	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9912	      && (tem = simplify_binary_operation (ROTATERT, mode,
9913						   op1, XEXP (op0, 1))) != 0)
9914	    {
9915	      op0 = XEXP (op0, 0);
9916	      op1 = tem;
9917	      continue;
9918	    }
9919
9920	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9921	     a particular bit.  Convert it to an AND of a constant of that
9922	     bit.  This will be converted into a ZERO_EXTRACT.  */
9923	  if (const_op == 0 && sign_bit_comparison_p
9924	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
9925	      && mode_width <= HOST_BITS_PER_WIDE_INT)
9926	    {
9927	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9928					    ((HOST_WIDE_INT) 1
9929					     << (mode_width - 1
9930						 - INTVAL (XEXP (op0, 1)))));
9931	      code = (code == LT ? NE : EQ);
9932	      continue;
9933	    }
9934
9935	  /* ... fall through ...  */
9936
9937	case ABS:
9938	  /* ABS is ignorable inside an equality comparison with zero.  */
9939	  if (const_op == 0 && equality_comparison_p)
9940	    {
9941	      op0 = XEXP (op0, 0);
9942	      continue;
9943	    }
9944	  break;
9945
9946
9947	case SIGN_EXTEND:
9948	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
9949	     to (compare FOO CONST) if CONST fits in FOO's mode and we
9950	     are either testing inequality or have an unsigned comparison
9951	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
9952	  if (! unsigned_comparison_p
9953	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9954		  <= HOST_BITS_PER_WIDE_INT)
9955	      && ((unsigned HOST_WIDE_INT) const_op
9956		  < (((HOST_WIDE_INT) 1
9957		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9958	    {
9959	      op0 = XEXP (op0, 0);
9960	      continue;
9961	    }
9962	  break;
9963
9964	case SUBREG:
9965	  /* Check for the case where we are comparing A - C1 with C2,
9966	     both constants are smaller than 1/2 the maximum positive
9967	     value in MODE, and the comparison is equality or unsigned.
9968	     In that case, if A is either zero-extended to MODE or has
9969	     sufficient sign bits so that the high-order bit in MODE
9970	     is a copy of the sign in the inner mode, we can prove that it is
9971	     safe to do the operation in the wider mode.  This simplifies
9972	     many range checks.  */
9973
9974	  if (mode_width <= HOST_BITS_PER_WIDE_INT
9975	      && subreg_lowpart_p (op0)
9976	      && GET_CODE (SUBREG_REG (op0)) == PLUS
9977	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9978	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9979	      && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9980		  < GET_MODE_MASK (mode) / 2)
9981	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9982	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9983				      GET_MODE (SUBREG_REG (op0)))
9984			& ~ GET_MODE_MASK (mode))
9985		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9986					   GET_MODE (SUBREG_REG (op0)))
9987		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9988			 - GET_MODE_BITSIZE (mode)))))
9989	    {
9990	      op0 = SUBREG_REG (op0);
9991	      continue;
9992	    }
9993
9994	  /* If the inner mode is narrower and we are extracting the low part,
9995	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
9996	  if (subreg_lowpart_p (op0)
9997	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9998	    /* Fall through */ ;
9999	  else
10000	    break;
10001
10002	  /* ... fall through ...  */
10003
10004	case ZERO_EXTEND:
10005	  if ((unsigned_comparison_p || equality_comparison_p)
10006	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10007		  <= HOST_BITS_PER_WIDE_INT)
10008	      && ((unsigned HOST_WIDE_INT) const_op
10009		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10010	    {
10011	      op0 = XEXP (op0, 0);
10012	      continue;
10013	    }
10014	  break;
10015
10016	case PLUS:
10017	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10018	     this for equality comparisons due to pathological cases involving
10019	     overflows.  */
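	  /* For example, (eq (plus X 4) 7) becomes (eq X 3).  */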
10020	  if (equality_comparison_p
10021	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10022							op1, XEXP (op0, 1))))
10023	    {
10024	      op0 = XEXP (op0, 0);
10025	      op1 = tem;
10026	      continue;
10027	    }
10028
10029	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10030	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10031	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10032	    {
10033	      op0 = XEXP (XEXP (op0, 0), 0);
10034	      code = (code == LT ? EQ : NE);
10035	      continue;
10036	    }
10037	  break;
10038
10039	case MINUS:
10040	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10041	     (eq B (minus A C)), whichever simplifies.  We can only do
10042	     this for equality comparisons due to pathological cases involving
10043	     overflows.  */
10044	  if (equality_comparison_p
10045	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10046							XEXP (op0, 1), op1)))
10047	    {
10048	      op0 = XEXP (op0, 0);
10049	      op1 = tem;
10050	      continue;
10051	    }
10052
10053	  if (equality_comparison_p
10054	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10055							XEXP (op0, 0), op1)))
10056	    {
10057	      op0 = XEXP (op0, 1);
10058	      op1 = tem;
10059	      continue;
10060	    }
10061
10062	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10063	     of bits in X minus 1, is one iff X > 0.  */
10064	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10065	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10066	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10067	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10068	    {
10069	      op0 = XEXP (op0, 1);
10070	      code = (code == GE ? LE : GT);
10071	      continue;
10072	    }
10073	  break;
10074
10075	case XOR:
10076	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10077	     if C is zero or B is a constant.  */
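	  /* For example, (eq (xor X 5) 3) becomes (eq X 6), since
	     5 ^ 3 == 6.  */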
10078	  if (equality_comparison_p
10079	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10080							XEXP (op0, 1), op1)))
10081	    {
10082	      op0 = XEXP (op0, 0);
10083	      op1 = tem;
10084	      continue;
10085	    }
10086	  break;
10087
10088	case EQ:  case NE:
10089	case LT:  case LTU:  case LE:  case LEU:
10090	case GT:  case GTU:  case GE:  case GEU:
10091	  /* We can't do anything if OP0 is a condition code value, rather
10092	     than an actual data value.  */
10093	  if (const_op != 0
10094#ifdef HAVE_cc0
10095	      || XEXP (op0, 0) == cc0_rtx
10096#endif
10097	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10098	    break;
10099
10100	  /* Get the two operands being compared.  */
10101	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10102	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10103	  else
10104	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10105
10106	  /* Check for the cases where we simply want the result of the
10107	     earlier test or the opposite of that result.  */
10108	  if (code == NE
10109	      || (code == EQ && reversible_comparison_p (op0))
10110	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10111		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10112		  && (STORE_FLAG_VALUE
10113		      & (((HOST_WIDE_INT) 1
10114			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10115		  && (code == LT
10116		      || (code == GE && reversible_comparison_p (op0)))))
10117	    {
10118	      code = (code == LT || code == NE
10119		      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10120	      op0 = tem, op1 = tem1;
10121	      continue;
10122	    }
10123	  break;
10124
10125	case IOR:
10126	  /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10127	     iff X <= 0.  */
10128	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10129	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10130	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10131	    {
10132	      op0 = XEXP (op0, 1);
10133	      code = (code == GE ? GT : LE);
10134	      continue;
10135	    }
10136	  break;
10137
10138	case AND:
10139	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10140	     will be converted to a ZERO_EXTRACT later.  */
10141	  if (const_op == 0 && equality_comparison_p
10142	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10143	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10144	    {
10145	      op0 = simplify_and_const_int
10146		(op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10147					     XEXP (op0, 1),
10148					     XEXP (XEXP (op0, 0), 1)),
10149		 (HOST_WIDE_INT) 1);
10150	      continue;
10151	    }
10152
10153	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10154	     zero and X is a comparison and C1 and C2 describe only bits set
10155	     in STORE_FLAG_VALUE, we can compare with X.  */
10156	  if (const_op == 0 && equality_comparison_p
10157	      && mode_width <= HOST_BITS_PER_WIDE_INT
10158	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10159	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10160	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10161	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10162	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10163	    {
10164	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10165		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10166	      if ((~ STORE_FLAG_VALUE & mask) == 0
10167		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10168		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10169			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10170		{
10171		  op0 = XEXP (XEXP (op0, 0), 0);
10172		  continue;
10173		}
10174	    }
10175
10176	  /* If we are doing an equality comparison of an AND of a bit equal
10177	     to the sign bit, replace this with a LT or GE comparison of
10178	     the underlying value.  */
10179	  if (equality_comparison_p
10180	      && const_op == 0
10181	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10182	      && mode_width <= HOST_BITS_PER_WIDE_INT
10183	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10184		  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10185	    {
10186	      op0 = XEXP (op0, 0);
10187	      code = (code == EQ ? GE : LT);
10188	      continue;
10189	    }
10190
10191	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10192	     mode, the constant fits within that mode, and this is either an
10193	     equality or unsigned comparison, try to do this comparison in
10194	     the narrower mode.  */
10195	  if ((equality_comparison_p || unsigned_comparison_p)
10196	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10197	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10198				   & GET_MODE_MASK (mode))
10199				  + 1)) >= 0
10200	      && const_op >> i == 0
10201	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10202	    {
10203	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10204	      continue;
10205	    }
10206
10207	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10208	     in both M1 and M2 and the SUBREG is either paradoxical or
10209	     represents the low part, permute the SUBREG and the AND and
10210	     try again.  */
10211	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
10212	      && ((mode_width
10213		   >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10214#ifdef WORD_REGISTER_OPERATIONS
10215		  || subreg_lowpart_p (XEXP (op0, 0))
10216#endif
10217		  )
10218#ifndef WORD_REGISTER_OPERATIONS
10219	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10220		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10221		 As originally written the upper bits have a defined value
10222		 due to the AND operation.  However, if we commute the AND
10223		 inside the SUBREG then they no longer have defined values
10224		 and the meaning of the code has been changed.  */
10225	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10226		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10227#endif
10228	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10229	      && mode_width <= HOST_BITS_PER_WIDE_INT
10230	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10231		  <= HOST_BITS_PER_WIDE_INT)
10232	      && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10233	      && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10234		       & INTVAL (XEXP (op0, 1)))
10235	      && INTVAL (XEXP (op0, 1)) != mask
10236	      && (INTVAL (XEXP (op0, 1))
10237		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10238
10239	    {
10240	      op0
10241		= gen_lowpart_for_combine
10242		  (mode,
10243		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10244			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10245	      continue;
10246	    }
10247
10248	  break;
10249
10250	case ASHIFT:
10251	  /* If we have (compare (ashift FOO N) (const_int C)) and
10252	     the high order N bits of FOO (N+1 if an inequality comparison)
10253	     are known to be zero, we can do this by comparing FOO with C
10254	     shifted right N bits so long as the low-order N bits of C are
10255	     zero.  */
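	  /* For example, (eq (ashift X 2) 20) becomes (eq X 5) when the
	     two high-order bits of X are known to be zero.  */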
10256	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10257	      && INTVAL (XEXP (op0, 1)) >= 0
10258	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10259		  < HOST_BITS_PER_WIDE_INT)
10260	      && ((const_op
10261		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10262	      && mode_width <= HOST_BITS_PER_WIDE_INT
10263	      && (nonzero_bits (XEXP (op0, 0), mode)
10264		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
10265				+ ! equality_comparison_p))) == 0)
10266	    {
10267	      const_op >>= INTVAL (XEXP (op0, 1));
10268	      op1 = GEN_INT (const_op);
10269	      op0 = XEXP (op0, 0);
10270	      continue;
10271	    }
10272
10273	  /* If we are doing a sign bit comparison, it means we are testing
10274	     a particular bit.  Convert it to the appropriate AND.  */
10275	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10276	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10277	    {
10278	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10279					    ((HOST_WIDE_INT) 1
10280					     << (mode_width - 1
10281						 - INTVAL (XEXP (op0, 1)))));
10282	      code = (code == LT ? NE : EQ);
10283	      continue;
10284	    }
10285
10286	  /* If this is an equality comparison with zero and we are shifting
10287	     the low bit to the sign bit, we can convert this to an AND of the
10288	     low-order bit.  */
10289	  if (const_op == 0 && equality_comparison_p
10290	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10291	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10292	    {
10293	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10294					    (HOST_WIDE_INT) 1);
10295	      continue;
10296	    }
10297	  break;
10298
10299	case ASHIFTRT:
10300	  /* If this is an equality comparison with zero, we can do this
10301	     as a logical shift, which might be much simpler.  */
10302	  if (equality_comparison_p && const_op == 0
10303	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10304	    {
10305	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10306					  XEXP (op0, 0),
10307					  INTVAL (XEXP (op0, 1)));
10308	      continue;
10309	    }
10310
10311	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10312	     do the comparison in a narrower mode.  */
10313	  if (! unsigned_comparison_p
10314	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10315	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10316	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10317	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10318					 MODE_INT, 1)) != BLKmode
10319	      && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10320		  || ((unsigned HOST_WIDE_INT) - const_op
10321		      <= GET_MODE_MASK (tmode))))
10322	    {
10323	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10324	      continue;
10325	    }
10326
10327	  /* ... fall through ...  */
10328	case LSHIFTRT:
10329	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10330	     the low order N bits of FOO are known to be zero, we can do this
10331	     by comparing FOO with C shifted left N bits so long as no
10332	     overflow occurs.  */
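	  /* A hedged example: if the low two bits of FOO are known zero,
	     (eq (lshiftrt FOO (const_int 2)) (const_int 5)) becomes
	     (eq FOO (const_int 20)); the floor_log2 test below guarantees
	     that the shifted constant still fits in the mode.  */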
10333	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10334	      && INTVAL (XEXP (op0, 1)) >= 0
10335	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10336	      && mode_width <= HOST_BITS_PER_WIDE_INT
10337	      && (nonzero_bits (XEXP (op0, 0), mode)
10338		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10339	      && (const_op == 0
10340		  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10341		      < mode_width)))
10342	    {
10343	      const_op <<= INTVAL (XEXP (op0, 1));
10344	      op1 = GEN_INT (const_op);
10345	      op0 = XEXP (op0, 0);
10346	      continue;
10347	    }
10348
10349	  /* If we are using this shift to extract just the sign bit, we
10350	     can replace this with an LT or GE comparison.  */
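	  /* Illustration (assuming a 32-bit mode): (lshiftrt X 31) is
	     nonzero exactly when the sign bit of X is set, so
	     (ne (lshiftrt X 31) 0) becomes (lt X 0) and
	     (eq (lshiftrt X 31) 0) becomes (ge X 0).  */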
10351	  if (const_op == 0
10352	      && (equality_comparison_p || sign_bit_comparison_p)
10353	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10354	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10355	    {
10356	      op0 = XEXP (op0, 0);
10357	      code = (code == NE || code == GT ? LT : GE);
10358	      continue;
10359	    }
10360	  break;
10361
10362	default:
10363	  break;
10364	}
10365
10366      break;
10367    }
10368
10369  /* Now make any compound operations involved in this comparison.  Then,
10370	     check for an outermost SUBREG on OP0 that is not doing anything or is
10371     paradoxical.  The latter case can only occur when it is known that the
10372     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
10373     We can never remove a SUBREG for a non-equality comparison because the
10374     sign bit is in a different place in the underlying object.  */
10375
10376  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10377  op1 = make_compound_operation (op1, SET);
10378
10379  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10380      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10381      && (code == NE || code == EQ)
10382      && ((GET_MODE_SIZE (GET_MODE (op0))
10383	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10384    {
10385      op0 = SUBREG_REG (op0);
10386      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10387    }
10388
10389  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10390	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10391	   && (code == NE || code == EQ)
10392	   && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10393	       <= HOST_BITS_PER_WIDE_INT)
10394	   && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10395	       & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10396	   && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10397					      op1),
10398	       (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10399		& ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10400    op0 = SUBREG_REG (op0), op1 = tem;
10401
10402  /* We now do the opposite procedure: Some machines don't have compare
10403     insns in all modes.  If OP0's mode is an integer mode smaller than a
10404     word and we can't do a compare in that mode, see if there is a larger
10405     mode for which we can do the compare.  There are a number of cases in
10406     which we can use the wider mode.  */
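  /* A worked example on a hypothetical target: if QImode has no compare
     pattern but SImode does, and nonzero_bits shows that both operands fit
     in the low 8 bits, an EQ/NE or unsigned comparison done in SImode gives
     the same result, so the operands are simply widened with
     gen_lowpart_for_combine below.  */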
10407
10408  mode = GET_MODE (op0);
10409  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10410      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10411      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10412    for (tmode = GET_MODE_WIDER_MODE (mode);
10413	 (tmode != VOIDmode
10414	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10415	 tmode = GET_MODE_WIDER_MODE (tmode))
10416      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10417	{
10418	  /* If the only nonzero bits in OP0 and OP1 are those in the
10419	     narrower mode and this is an equality or unsigned comparison,
10420	     we can use the wider mode.  Similarly for sign-extended
10421	     values, in which case it is true for all comparisons.  */
10422	  if (((code == EQ || code == NE
10423		|| code == GEU || code == GTU || code == LEU || code == LTU)
10424	       && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10425	       && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
10426	      || ((num_sign_bit_copies (op0, tmode)
10427		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10428		  && (num_sign_bit_copies (op1, tmode)
10429		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10430	    {
10431	      op0 = gen_lowpart_for_combine (tmode, op0);
10432	      op1 = gen_lowpart_for_combine (tmode, op1);
10433	      break;
10434	    }
10435
10436	  /* If this is a test for negative, we can make an explicit
10437	     test of the sign bit.  */
10438
10439	  if (op1 == const0_rtx && (code == LT || code == GE)
10440	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10441	    {
10442	      op0 = gen_binary (AND, tmode,
10443				gen_lowpart_for_combine (tmode, op0),
10444				GEN_INT ((HOST_WIDE_INT) 1
10445					 << (GET_MODE_BITSIZE (mode) - 1)));
10446	      code = (code == LT) ? NE : EQ;
10447	      break;
10448	    }
10449	}
10450
10451#ifdef CANONICALIZE_COMPARISON
10452  /* If this machine only supports a subset of valid comparisons, see if we
10453     can convert an unsupported one into a supported one.  */
10454  CANONICALIZE_COMPARISON (code, op0, op1);
10455#endif
10456
10457  *pop0 = op0;
10458  *pop1 = op1;
10459
10460  return code;
10461}
10462
10463/* Return 1 if we know that X, a comparison operation, is not operating
10464   on a floating-point value or is EQ or NE, meaning that we can safely
10465   reverse it.  */
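/* For instance (illustrative): under IEEE arithmetic, (lt x y) and
   (ge x y) are both false when either operand is a NaN, so LT cannot be
   reversed to GE on floating-point values; EQ and NE, by contrast, are
   exact complements even in the presence of NaNs.  */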
10466
10467static int
10468reversible_comparison_p (x)
10469     rtx x;
10470{
10471  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
10472      || flag_fast_math
10473      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10474    return 1;
10475
10476  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10477    {
10478    case MODE_INT:
10479    case MODE_PARTIAL_INT:
10480    case MODE_COMPLEX_INT:
10481      return 1;
10482
10483    case MODE_CC:
10484      /* If the mode of the condition codes tells us that this is safe,
10485	 we need look no further.  */
10486      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10487	return 1;
10488
10489	      /* Otherwise try to find where the condition codes were last set and
10490	 use that.  */
10491      x = get_last_value (XEXP (x, 0));
10492      return (x && GET_CODE (x) == COMPARE
10493	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10494
10495    default:
10496      return 0;
10497    }
10498}
10499
10500/* Utility function for following routine.  Called when X is part of a value
10501   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
10502	   for each register mentioned.  Similar to mention_regs in cse.c.  */
10503
10504static void
10505update_table_tick (x)
10506     rtx x;
10507{
10508  register enum rtx_code code = GET_CODE (x);
10509  register char *fmt = GET_RTX_FORMAT (code);
10510  register int i;
10511
10512  if (code == REG)
10513    {
10514      int regno = REGNO (x);
10515      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10516			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10517
10518      for (i = regno; i < endregno; i++)
10519	reg_last_set_table_tick[i] = label_tick;
10520
10521      return;
10522    }
10523
10524  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10525    /* Note that we can't have an "E" in values stored; see
10526       get_last_value_validate.  */
10527    if (fmt[i] == 'e')
10528      update_table_tick (XEXP (x, i));
10529}
10530
10531/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
10532   are saying that the register is clobbered and we no longer know its
10533   value.  If INSN is zero, don't update reg_last_set; this is only permitted
10534   with VALUE also zero and is used to invalidate the register.  */
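/* A sketch of the three call forms (illustrative, restating the contract
   above, not additional code paths):

     record_value_for_reg (reg, insn, src);   REG is set to SRC by INSN.
     record_value_for_reg (reg, insn, 0);     INSN clobbers REG.
     record_value_for_reg (reg, 0, 0);        Forget REG entirely.  */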
10535
10536static void
10537record_value_for_reg (reg, insn, value)
10538     rtx reg;
10539     rtx insn;
10540     rtx value;
10541{
10542  int regno = REGNO (reg);
10543  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10544			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10545  int i;
10546
10547  /* If VALUE contains REG and we have a previous value for REG, substitute
10548     the previous value.  */
10549  if (value && insn && reg_overlap_mentioned_p (reg, value))
10550    {
10551      rtx tem;
10552
10553      /* Set things up so get_last_value is allowed to see anything set up to
10554	 our insn.  */
10555      subst_low_cuid = INSN_CUID (insn);
10556      tem = get_last_value (reg);
10557
10558      if (tem)
10559	value = replace_rtx (copy_rtx (value), reg, tem);
10560    }
10561
10562  /* For each register modified, show we don't know its value, that
10563     we don't know about its bitwise content, that its value has been
10564     updated, and that we don't know the location of the death of the
10565     register.  */
10566  for (i = regno; i < endregno; i ++)
10567    {
10568      if (insn)
10569	reg_last_set[i] = insn;
10570      reg_last_set_value[i] = 0;
10571      reg_last_set_mode[i] = 0;
10572      reg_last_set_nonzero_bits[i] = 0;
10573      reg_last_set_sign_bit_copies[i] = 0;
10574      reg_last_death[i] = 0;
10575    }
10576
10577  /* Mark registers that are being referenced in this value.  */
10578  if (value)
10579    update_table_tick (value);
10580
10581  /* Now update the status of each register being set.
10582     If someone is using this register in this block, set this register
10583     to invalid since we will get confused between the two lives in this
10584     basic block.  This makes using this register always invalid.  In cse, we
10585     scan the table to invalidate all entries using this register, but this
10586     is too much work for us.  */
10587
10588  for (i = regno; i < endregno; i++)
10589    {
10590      reg_last_set_label[i] = label_tick;
10591      if (value && reg_last_set_table_tick[i] == label_tick)
10592	reg_last_set_invalid[i] = 1;
10593      else
10594	reg_last_set_invalid[i] = 0;
10595    }
10596
10597	  /* The value being assigned might refer to REG (as in "x++;").  In that
10598     case, we must replace it with (clobber (const_int 0)) to prevent
10599     infinite loops.  */
10600  if (value && ! get_last_value_validate (&value, insn,
10601					  reg_last_set_label[regno], 0))
10602    {
10603      value = copy_rtx (value);
10604      if (! get_last_value_validate (&value, insn,
10605				     reg_last_set_label[regno], 1))
10606	value = 0;
10607    }
10608
10609  /* For the main register being modified, update the value, the mode, the
10610     nonzero bits, and the number of sign bit copies.  */
10611
10612  reg_last_set_value[regno] = value;
10613
10614  if (value)
10615    {
10616      subst_low_cuid = INSN_CUID (insn);
10617      reg_last_set_mode[regno] = GET_MODE (reg);
10618      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
10619      reg_last_set_sign_bit_copies[regno]
10620	= num_sign_bit_copies (value, GET_MODE (reg));
10621    }
10622}
10623
10624/* Used for communication between the following two routines.  */
10625static rtx record_dead_insn;
10626
10627/* Called via note_stores from record_dead_and_set_regs to handle one
10628   SET or CLOBBER in an insn.  */
10629
10630static void
10631record_dead_and_set_regs_1 (dest, setter)
10632     rtx dest, setter;
10633{
10634  if (GET_CODE (dest) == SUBREG)
10635    dest = SUBREG_REG (dest);
10636
10637  if (GET_CODE (dest) == REG)
10638    {
10639      /* If we are setting the whole register, we know its value.  Otherwise
10640	 show that we don't know the value.  We can handle SUBREG in
10641	 some cases.  */
10642      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10643	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10644      else if (GET_CODE (setter) == SET
10645	       && GET_CODE (SET_DEST (setter)) == SUBREG
10646	       && SUBREG_REG (SET_DEST (setter)) == dest
10647	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
10648	       && subreg_lowpart_p (SET_DEST (setter)))
10649	record_value_for_reg (dest, record_dead_insn,
10650			      gen_lowpart_for_combine (GET_MODE (dest),
10651						       SET_SRC (setter)));
10652      else
10653	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
10654    }
10655  else if (GET_CODE (dest) == MEM
10656	   /* Ignore pushes, they clobber nothing.  */
10657	   && ! push_operand (dest, GET_MODE (dest)))
10658    mem_last_set = INSN_CUID (record_dead_insn);
10659}
10660
10661/* Update the records of when each REG was most recently set or killed
10662   for the things done by INSN.  This is the last thing done in processing
10663   INSN in the combiner loop.
10664
10665   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10666   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10667   and also the similar information mem_last_set (which insn most recently
10668   modified memory) and last_call_cuid (which insn was the most recent
10669   subroutine call).  */
10670
10671static void
10672record_dead_and_set_regs (insn)
10673     rtx insn;
10674{
10675  register rtx link;
10676  int i;
10677
10678  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10679    {
10680      if (REG_NOTE_KIND (link) == REG_DEAD
10681	  && GET_CODE (XEXP (link, 0)) == REG)
10682	{
10683	  int regno = REGNO (XEXP (link, 0));
10684	  int endregno
10685	    = regno + (regno < FIRST_PSEUDO_REGISTER
10686		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10687		       : 1);
10688
10689	  for (i = regno; i < endregno; i++)
10690	    reg_last_death[i] = insn;
10691	}
10692      else if (REG_NOTE_KIND (link) == REG_INC)
10693	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
10694    }
10695
10696  if (GET_CODE (insn) == CALL_INSN)
10697    {
10698      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10699	if (call_used_regs[i])
10700	  {
10701	    reg_last_set_value[i] = 0;
10702	    reg_last_set_mode[i] = 0;
10703	    reg_last_set_nonzero_bits[i] = 0;
10704	    reg_last_set_sign_bit_copies[i] = 0;
10705	    reg_last_death[i] = 0;
10706	  }
10707
10708      last_call_cuid = mem_last_set = INSN_CUID (insn);
10709    }
10710
10711  record_dead_insn = insn;
10712  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10713}
10714
10715/* Utility routine for the following function.  Verify that all the registers
10716   mentioned in *LOC are valid when *LOC was part of a value set when
10717	   label_tick == TICK.  Return 0 if any are not.
10718
10719   If REPLACE is non-zero, replace the invalid reference with
10720   (clobber (const_int 0)) and return 1.  This replacement is useful because
10721   we often can get useful information about the form of a value (e.g., if
10722   it was produced by a shift that always produces -1 or 0) even though
10723   we don't know exactly what registers it was produced from.  */
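/* For example (a hedged illustration): if the recorded value was
   (ashiftrt (reg 65) (const_int 31)) and reg 65 is no longer valid,
   replacement yields (ashiftrt (clobber (const_int 0)) (const_int 31)),
   which still tells num_sign_bit_copies that every bit of the result
   equals the sign bit, even though the source register is unknown.  */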
10724
10725static int
10726get_last_value_validate (loc, insn, tick, replace)
10727     rtx *loc;
10728     rtx insn;
10729     int tick;
10730     int replace;
10731{
10732  rtx x = *loc;
10733  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
10734  int len = GET_RTX_LENGTH (GET_CODE (x));
10735  int i;
10736
10737  if (GET_CODE (x) == REG)
10738    {
10739      int regno = REGNO (x);
10740      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10741			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10742      int j;
10743
10744      for (j = regno; j < endregno; j++)
10745	if (reg_last_set_invalid[j]
10746	    /* If this is a pseudo-register that was only set once, it is
10747	       always valid.  */
10748	    || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
10749		&& reg_last_set_label[j] > tick))
10750	  {
10751	    if (replace)
10752	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10753	    return replace;
10754	  }
10755
10756      return 1;
10757    }
10758  /* If this is a memory reference, make sure that there were
10759     no stores after it that might have clobbered the value.  We don't
10760     have alias info, so we assume any store invalidates it.  */
10761  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
10762	   && INSN_CUID (insn) <= mem_last_set)
10763    {
10764      if (replace)
10765	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10766      return replace;
10767    }
10768
10769  for (i = 0; i < len; i++)
10770    if ((fmt[i] == 'e'
10771	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
10772	/* Don't bother with these.  They shouldn't occur anyway.  */
10773	|| fmt[i] == 'E')
10774      return 0;
10775
10776  /* If we haven't found a reason for it to be invalid, it is valid.  */
10777  return 1;
10778}
10779
10780/* Get the last value assigned to X, if known.  Some registers
10781   in the value may be replaced with (clobber (const_int 0)) if their value
10782	   is no longer known reliably.  */
10783
10784static rtx
10785get_last_value (x)
10786     rtx x;
10787{
10788  int regno;
10789  rtx value;
10790
10791  /* If this is a non-paradoxical SUBREG, get the value of its operand and
10792     then convert it to the desired mode.  If this is a paradoxical SUBREG,
10793     we cannot predict what values the "extra" bits might have.  */
10794  if (GET_CODE (x) == SUBREG
10795      && subreg_lowpart_p (x)
10796      && (GET_MODE_SIZE (GET_MODE (x))
10797	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10798      && (value = get_last_value (SUBREG_REG (x))) != 0)
10799    return gen_lowpart_for_combine (GET_MODE (x), value);
10800
10801  if (GET_CODE (x) != REG)
10802    return 0;
10803
10804  regno = REGNO (x);
10805  value = reg_last_set_value[regno];
10806
10807  /* If we don't have a value or if it isn't for this basic block,
10808     return 0.  */
10809
10810  if (value == 0
10811      || (REG_N_SETS (regno) != 1
10812	  && reg_last_set_label[regno] != label_tick))
10813    return 0;
10814
10815  /* If the value was set in a later insn than the ones we are processing,
10816     we can't use it even if the register was only set once, but make a quick
10817     check to see if the previous insn set it to something.  This is commonly
10818     the case when the same pseudo is used by repeated insns.
10819
10820     This does not work if there exists an instruction which is temporarily
10821     not on the insn chain.  */
10822
10823  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10824    {
10825      rtx insn, set;
10826
10827	      /* We cannot do anything useful in this case, because there is
10828	 an instruction which is not on the insn chain.  */
10829      if (subst_prev_insn)
10830	return 0;
10831
10832      /* Skip over USE insns.  They are not useful here, and they may have
10833	 been made by combine, in which case they do not have an INSN_CUID
10834	 value.  We can't use prev_real_insn, because that would incorrectly
10835	 take us backwards across labels.  Skip over BARRIERs also, since
10836	 they could have been made by combine.  If we see one, we must be
10837	 optimizing dead code, so it doesn't matter what we do.  */
10838      for (insn = prev_nonnote_insn (subst_insn);
10839	   insn && ((GET_CODE (insn) == INSN
10840		     && GET_CODE (PATTERN (insn)) == USE)
10841		    || GET_CODE (insn) == BARRIER
10842		    || INSN_CUID (insn) >= subst_low_cuid);
10843	   insn = prev_nonnote_insn (insn))
10844	;
10845
10846      if (insn
10847	  && (set = single_set (insn)) != 0
10848	  && rtx_equal_p (SET_DEST (set), x))
10849	{
10850	  value = SET_SRC (set);
10851
10852	  /* Make sure that VALUE doesn't reference X.  Replace any
10853	     explicit references with a CLOBBER.  If there are any remaining
10854	     references (rare), don't use the value.  */
10855
10856	  if (reg_mentioned_p (x, value))
10857	    value = replace_rtx (copy_rtx (value), x,
10858				 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));
10859
10860	  if (reg_overlap_mentioned_p (x, value))
10861	    return 0;
10862	}
10863      else
10864	return 0;
10865    }
10866
10867  /* If the value has all its registers valid, return it.  */
10868  if (get_last_value_validate (&value, reg_last_set[regno],
10869			       reg_last_set_label[regno], 0))
10870    return value;
10871
10872  /* Otherwise, make a copy and replace any invalid register with
10873     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
10874
10875  value = copy_rtx (value);
10876  if (get_last_value_validate (&value, reg_last_set[regno],
10877			       reg_last_set_label[regno], 1))
10878    return value;
10879
10880  return 0;
10881}
10882
10883/* Return nonzero if expression X refers to a REG or to memory
10884   that is set in an instruction more recent than FROM_CUID.  */
10885
10886static int
10887use_crosses_set_p (x, from_cuid)
10888     register rtx x;
10889     int from_cuid;
10890{
10891  register char *fmt;
10892  register int i;
10893  register enum rtx_code code = GET_CODE (x);
10894
10895  if (code == REG)
10896    {
10897      register int regno = REGNO (x);
10898      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10899			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10900
10901#ifdef PUSH_ROUNDING
10902      /* Don't allow uses of the stack pointer to be moved,
10903	 because we don't know whether the move crosses a push insn.  */
10904      if (regno == STACK_POINTER_REGNUM)
10905	return 1;
10906#endif
10907	      for (; regno < endreg; regno++)
10908	if (reg_last_set[regno]
10909	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
10910	  return 1;
10911      return 0;
10912    }
10913
10914  if (code == MEM && mem_last_set > from_cuid)
10915    return 1;
10916
10917  fmt = GET_RTX_FORMAT (code);
10918
10919  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10920    {
10921      if (fmt[i] == 'E')
10922	{
10923	  register int j;
10924	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10925	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10926	      return 1;
10927	}
10928      else if (fmt[i] == 'e'
10929	       && use_crosses_set_p (XEXP (x, i), from_cuid))
10930	return 1;
10931    }
10932  return 0;
10933}
10934
10935/* Define three variables used for communication between the following
10936   routines.  */
10937
10938static int reg_dead_regno, reg_dead_endregno;
10939static int reg_dead_flag;
10940
10941/* Function called via note_stores from reg_dead_at_p.
10942
10943   If DEST is within [reg_dead_regno, reg_dead_endregno), set
10944	   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
10945
10946static void
10947reg_dead_at_p_1 (dest, x)
10948     rtx dest;
10949     rtx x;
10950{
10951  int regno, endregno;
10952
10953  if (GET_CODE (dest) != REG)
10954    return;
10955
10956  regno = REGNO (dest);
10957  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10958		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10959
10960  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10961    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10962}
10963
10964/* Return non-zero if REG is known to be dead at INSN.
10965
10966   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
10967   referencing REG, it is dead.  If we hit a SET referencing REG, it is
10968   live.  Otherwise, see if it is live or dead at the start of the basic
10969   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
10970   must be assumed to be always live.  */
10971
10972static int
10973reg_dead_at_p (reg, insn)
10974     rtx reg;
10975     rtx insn;
10976{
10977  int block, i;
10978
10979  /* Set variables for reg_dead_at_p_1.  */
10980  reg_dead_regno = REGNO (reg);
10981  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10982					? HARD_REGNO_NREGS (reg_dead_regno,
10983							    GET_MODE (reg))
10984					: 1);
10985
10986  reg_dead_flag = 0;
10987
10988  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
10989  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10990    {
10991      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10992	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10993	  return 0;
10994    }
10995
10996  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10997     beginning of function.  */
10998  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
10999       insn = prev_nonnote_insn (insn))
11000    {
11001      note_stores (PATTERN (insn), reg_dead_at_p_1);
11002      if (reg_dead_flag)
11003	return reg_dead_flag == 1 ? 1 : 0;
11004
11005      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11006	return 1;
11007    }
11008
11009  /* Get the basic block number that we were in.  */
11010  if (insn == 0)
11011    block = 0;
11012  else
11013    {
11014      for (block = 0; block < n_basic_blocks; block++)
11015	if (insn == basic_block_head[block])
11016	  break;
11017
11018      if (block == n_basic_blocks)
11019	return 0;
11020    }
11021
11022  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11023    if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
11024      return 0;
11025
11026  return 1;
11027}
11028
11029/* Note hard registers in X that are used.  This code is similar to
11030   that in flow.c, but much simpler since we don't care about pseudos.  */
11031
11032static void
11033mark_used_regs_combine (x)
11034     rtx x;
11035{
11036  register RTX_CODE code = GET_CODE (x);
11037  register int regno;
11038  int i;
11039
11040  switch (code)
11041    {
11042    case LABEL_REF:
11043    case SYMBOL_REF:
11044    case CONST_INT:
11045    case CONST:
11046    case CONST_DOUBLE:
11047    case PC:
11048    case ADDR_VEC:
11049    case ADDR_DIFF_VEC:
11050    case ASM_INPUT:
11051#ifdef HAVE_cc0
11052    /* CC0 must die in the insn after it is set, so we don't need to take
11053       special note of it here.  */
11054    case CC0:
11055#endif
11056      return;
11057
11058    case CLOBBER:
11059      /* If we are clobbering a MEM, mark any hard registers inside the
11060	 address as used.  */
11061      if (GET_CODE (XEXP (x, 0)) == MEM)
11062	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11063      return;
11064
11065    case REG:
11066      regno = REGNO (x);
11067      /* A hard reg in a wide mode may really be multiple registers.
11068	 If so, mark all of them just like the first.  */
11069      if (regno < FIRST_PSEUDO_REGISTER)
11070	{
11071	  /* None of this applies to the stack, frame or arg pointers.  */
11072	  if (regno == STACK_POINTER_REGNUM
11073#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11074	      || regno == HARD_FRAME_POINTER_REGNUM
11075#endif
11076#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11077	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11078#endif
11079	      || regno == FRAME_POINTER_REGNUM)
11080	    return;
11081
11082	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
11083	  while (i-- > 0)
11084	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
11085	}
11086      return;
11087
11088    case SET:
11089      {
11090	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11091	   the address.  */
11092	register rtx testreg = SET_DEST (x);
11093
11094	while (GET_CODE (testreg) == SUBREG
11095	       || GET_CODE (testreg) == ZERO_EXTRACT
11096	       || GET_CODE (testreg) == SIGN_EXTRACT
11097	       || GET_CODE (testreg) == STRICT_LOW_PART)
11098	  testreg = XEXP (testreg, 0);
11099
11100	if (GET_CODE (testreg) == MEM)
11101	  mark_used_regs_combine (XEXP (testreg, 0));
11102
11103	mark_used_regs_combine (SET_SRC (x));
11104      }
11105      return;
11106
11107    default:
11108      break;
11109    }
11110
11111  /* Recursively scan the operands of this expression.  */
11112
11113  {
11114    register char *fmt = GET_RTX_FORMAT (code);
11115
11116    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11117      {
11118        if (fmt[i] == 'e')
11119	  mark_used_regs_combine (XEXP (x, i));
11120        else if (fmt[i] == 'E')
11121          {
11122            register int j;
11123
11124            for (j = 0; j < XVECLEN (x, i); j++)
11125              mark_used_regs_combine (XVECEXP (x, i, j));
11126          }
11127      }
11128  }
11129}
11130
11131
11132/* Remove register number REGNO from the dead registers list of INSN.
11133
11134   Return the note used to record the death, if there was one.  */
11135
11136rtx
11137remove_death (regno, insn)
11138     int regno;
11139     rtx insn;
11140{
11141  register rtx note = find_regno_note (insn, REG_DEAD, regno);
11142
11143  if (note)
11144    {
11145      REG_N_DEATHS (regno)--;
11146      remove_note (insn, note);
11147    }
11148
11149  return note;
11150}
11151
11152/* For each register (hardware or pseudo) used within expression X, if its
11153   death is in an instruction with cuid between FROM_CUID (inclusive) and
11154   TO_INSN (exclusive), put a REG_DEAD note for that register in the
11155   list headed by PNOTES.
11156
11157   That said, don't move registers killed by maybe_kill_insn.
11158
11159   This is done when X is being merged by combination into TO_INSN.  These
11160   notes will then be distributed as needed.  */
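/* A hedged example: when I2 is combined into I3, a register that formerly
   died in I2 now dies in I3 instead; its REG_DEAD note is removed from I2
   via remove_death and queued on *PNOTES so that distribute_notes can
   re-place it on I3 afterwards.  */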
11161
11162static void
11163move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11164     rtx x;
11165     rtx maybe_kill_insn;
11166     int from_cuid;
11167     rtx to_insn;
11168     rtx *pnotes;
11169{
11170  register char *fmt;
11171  register int len, i;
11172  register enum rtx_code code = GET_CODE (x);
11173
11174  if (code == REG)
11175    {
11176      register int regno = REGNO (x);
11177      register rtx where_dead = reg_last_death[regno];
11178      register rtx before_dead, after_dead;
11179
11180	      /* Don't move the register if it gets killed between FROM_CUID and TO_INSN.  */
11181      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11182	  && !reg_referenced_p (x, maybe_kill_insn))
11183	return;
11184
11185      /* WHERE_DEAD could be a USE insn made by combine, so first we
11186	 make sure that we have insns with valid INSN_CUID values.  */
11187      before_dead = where_dead;
11188      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11189	before_dead = PREV_INSN (before_dead);
11190      after_dead = where_dead;
11191      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11192	after_dead = NEXT_INSN (after_dead);
11193
11194      if (before_dead && after_dead
11195	  && INSN_CUID (before_dead) >= from_cuid
11196	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11197	      || (where_dead != after_dead
11198		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11199	{
11200	  rtx note = remove_death (regno, where_dead);
11201
11202	  /* It is possible for the call above to return 0.  This can occur
11203	     when reg_last_death points to the I2 or I1 that we combined with.
11204	     In that case make a new note.
11205
11206	     We must also check for the case where X is a hard register
11207	     and NOTE is a death note for a range of hard registers
11208	     including X.  In that case, we must put REG_DEAD notes for
11209	     the remaining registers in place of NOTE.  */
11210
11211	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11212	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11213		  > GET_MODE_SIZE (GET_MODE (x))))
11214	    {
11215	      int deadregno = REGNO (XEXP (note, 0));
11216	      int deadend
11217		= (deadregno + HARD_REGNO_NREGS (deadregno,
11218						 GET_MODE (XEXP (note, 0))));
11219	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11220	      int i;
11221
11222	      for (i = deadregno; i < deadend; i++)
11223		if (i < regno || i >= ourend)
11224		  REG_NOTES (where_dead)
11225		    = gen_rtx_EXPR_LIST (REG_DEAD,
11226					 gen_rtx_REG (reg_raw_mode[i], i),
11227					 REG_NOTES (where_dead));
11228	    }
11229	  /* If we didn't find any note, or if we found a REG_DEAD note that
11230	     covers only part of the given reg, and we have a multi-reg hard
11231	     register, then to be safe we must check for REG_DEAD notes
11232	     for each register other than the first.  They could have
11233	     their own REG_DEAD notes lying around.  */
11234	  else if ((note == 0
11235		    || (note != 0
11236			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11237			    < GET_MODE_SIZE (GET_MODE (x)))))
11238		   && regno < FIRST_PSEUDO_REGISTER
11239		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11240	    {
11241	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11242	      int i, offset;
11243	      rtx oldnotes = 0;
11244
11245	      if (note)
11246		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11247	      else
11248		offset = 1;
11249
11250	      for (i = regno + offset; i < ourend; i++)
11251		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11252			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11253	    }
11254
11255	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11256	    {
11257	      XEXP (note, 1) = *pnotes;
11258	      *pnotes = note;
11259	    }
11260	  else
11261	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11262
11263	  REG_N_DEATHS (regno)++;
11264	}
11265
11266      return;
11267    }
11268
11269  else if (GET_CODE (x) == SET)
11270    {
11271      rtx dest = SET_DEST (x);
11272
11273      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11274
11275      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11276	 that accesses one word of a multi-word item, some
11277	 piece of every register in the expression is used by
11278	 this insn, so remove any old death.  */
11279
11280      if (GET_CODE (dest) == ZERO_EXTRACT
11281	  || GET_CODE (dest) == STRICT_LOW_PART
11282	  || (GET_CODE (dest) == SUBREG
11283	      && (((GET_MODE_SIZE (GET_MODE (dest))
11284		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11285		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11286		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11287	{
11288	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11289	  return;
11290	}
11291
11292      /* If this is some other SUBREG, we know it replaces the entire
11293	 value, so use that as the destination.  */
11294      if (GET_CODE (dest) == SUBREG)
11295	dest = SUBREG_REG (dest);
11296
11297      /* If this is a MEM, adjust deaths of anything used in the address.
11298	 For a REG (the only other possibility), the entire value is
11299	 being replaced so the old value is not used in this insn.  */
11300
11301      if (GET_CODE (dest) == MEM)
11302	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11303		     to_insn, pnotes);
11304      return;
11305    }
11306
11307  else if (GET_CODE (x) == CLOBBER)
11308    return;
11309
11310  len = GET_RTX_LENGTH (code);
11311  fmt = GET_RTX_FORMAT (code);
11312
11313  for (i = 0; i < len; i++)
11314    {
11315      if (fmt[i] == 'E')
11316	{
11317	  register int j;
11318	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11319	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11320			 to_insn, pnotes);
11321	}
11322      else if (fmt[i] == 'e')
11323	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11324    }
11325}
11326
11327/* Return 1 if X is the target of a bit-field assignment in BODY, the
11328   pattern of an insn.  X must be a REG.  */
11329
11330static int
11331reg_bitfield_target_p (x, body)
11332     rtx x;
11333     rtx body;
11334{
11335  int i;
11336
11337  if (GET_CODE (body) == SET)
11338    {
11339      rtx dest = SET_DEST (body);
11340      rtx target;
11341      int regno, tregno, endregno, endtregno;
11342
11343      if (GET_CODE (dest) == ZERO_EXTRACT)
11344	target = XEXP (dest, 0);
11345      else if (GET_CODE (dest) == STRICT_LOW_PART)
11346	target = SUBREG_REG (XEXP (dest, 0));
11347      else
11348	return 0;
11349
11350      if (GET_CODE (target) == SUBREG)
11351	target = SUBREG_REG (target);
11352
11353      if (GET_CODE (target) != REG)
11354	return 0;
11355
11356      tregno = REGNO (target), regno = REGNO (x);
11357      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11358	return target == x;
11359
11360      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11361      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11362
11363      return endregno > tregno && regno < endtregno;
11364    }
11365
11366  else if (GET_CODE (body) == PARALLEL)
11367    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11368      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11369	return 1;
11370
11371  return 0;
11372}
11373
11374/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11375	   as appropriate.  I3 and I2 are the insns resulting from combining the
11376	   insns that included FROM_INSN (I2 may be zero).
11377
11378   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11379   not need REG_DEAD notes because they are being substituted for.  This
11380   saves searching in the most common cases.
11381
11382   Each note in the list is either ignored or placed on some insns, depending
11383   on the type of note.  */
11384
11385static void
11386distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11387     rtx notes;
11388     rtx from_insn;
11389     rtx i3, i2;
11390     rtx elim_i2, elim_i1;
11391{
11392  rtx note, next_note;
11393  rtx tem;
11394
11395  for (note = notes; note; note = next_note)
11396    {
11397      rtx place = 0, place2 = 0;
11398
11399      /* If this NOTE references a pseudo register, ensure it references
11400	 the latest copy of that register.  */
11401      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11402	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11403	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11404
11405      next_note = XEXP (note, 1);
11406      switch (REG_NOTE_KIND (note))
11407	{
11408	case REG_BR_PROB:
11409	case REG_EXEC_COUNT:
11410	  /* Doesn't matter much where we put this, as long as it's somewhere.
11411	     It is preferable to keep these notes on branches, which is most
11412	     likely to be i3.  */
11413	  place = i3;
11414	  break;
11415
11416	case REG_UNUSED:
11417	  /* Any clobbers for i3 may still exist, and so we must process
11418	     REG_UNUSED notes from that insn.
11419
11420	     Any clobbers from i2 or i1 can only exist if they were added by
11421	     recog_for_combine.  In that case, recog_for_combine created the
11422	     necessary REG_UNUSED notes.  Trying to keep any original
11423	     REG_UNUSED notes from these insns can cause incorrect output
11424	     if they are for the same register as the original i3 dest.
11425	     In that case, we will notice that the register is set in i3,
11426	     and then add a REG_UNUSED note for the destination of i3, which
11427	     is wrong.  However, it is possible to have REG_UNUSED notes from
11428	     i2 or i1 for registers which were both used and clobbered, so
11429	     we keep notes from i2 or i1 if they will turn into REG_DEAD
11430	     notes.  */
11431
11432	  /* If this register is set or clobbered in I3, put the note there
11433	     unless there is one already.  */
11434	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11435	    {
11436	      if (from_insn != i3)
11437		break;
11438
11439	      if (! (GET_CODE (XEXP (note, 0)) == REG
11440		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11441		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11442		place = i3;
11443	    }
11444	  /* Otherwise, if this register is used by I3, then this register
11445	     now dies here, so we must put a REG_DEAD note here unless there
11446	     is one already.  */
11447	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11448		   && ! (GET_CODE (XEXP (note, 0)) == REG
11449			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11450			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11451	    {
11452	      PUT_REG_NOTE_KIND (note, REG_DEAD);
11453	      place = i3;
11454	    }
11455	  break;
11456
11457	case REG_EQUAL:
11458	case REG_EQUIV:
11459	case REG_NONNEG:
11460	case REG_NOALIAS:
11461	  /* These notes say something about results of an insn.  We can
11462	     only support them if they used to be on I3 in which case they
11463	     remain on I3.  Otherwise they are ignored.
11464
11465	     If the note refers to an expression that is not a constant, we
11466	     must also ignore the note since we cannot tell whether the
11467	     equivalence is still true.  It might be possible to do
11468	     slightly better than this (we only have a problem if I2DEST
11469	     or I1DEST is present in the expression), but it doesn't
11470	     seem worth the trouble.  */
11471
11472	  if (from_insn == i3
11473	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
11474	    place = i3;
11475	  break;
11476
11477	case REG_INC:
11478	case REG_NO_CONFLICT:
11479	case REG_LABEL:
11480	  /* These notes say something about how a register is used.  They must
11481	     be present on any use of the register in I2 or I3.  */
11482	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11483	    place = i3;
11484
11485	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11486	    {
11487	      if (place)
11488		place2 = i2;
11489	      else
11490		place = i2;
11491	    }
11492	  break;
11493
11494	case REG_WAS_0:
11495	  /* It is too much trouble to try to see if this note is still
11496	     correct in all situations.  It is better to simply delete it.  */
11497	  break;
11498
11499	case REG_RETVAL:
11500	  /* If the insn previously containing this note still exists,
11501	     put it back where it was.  Otherwise move it to the previous
11502	     insn.  Adjust the corresponding REG_LIBCALL note.  */
11503	  if (GET_CODE (from_insn) != NOTE)
11504	    place = from_insn;
11505	  else
11506	    {
11507	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
11508	      place = prev_real_insn (from_insn);
11509	      if (tem && place)
11510		XEXP (tem, 0) = place;
11511	    }
11512	  break;
11513
11514	case REG_LIBCALL:
11515	  /* This is handled similarly to REG_RETVAL.  */
11516	  if (GET_CODE (from_insn) != NOTE)
11517	    place = from_insn;
11518	  else
11519	    {
11520	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
11521	      place = next_real_insn (from_insn);
11522	      if (tem && place)
11523		XEXP (tem, 0) = place;
11524	    }
11525	  break;
11526
11527	case REG_DEAD:
11528	  /* If the register is used as an input in I3, it dies there.
11529	     Similarly for I2, if it is non-zero and adjacent to I3.
11530
11531	     If the register is not used as an input in either I3 or I2
11532	     and it is not one of the registers we were supposed to eliminate,
11533	     there are two possibilities.  We might have a non-adjacent I2
11534	     or we might have somehow eliminated an additional register
11535	     from a computation.  For example, we might have had A & B where
11536	     we discover that B will always be zero.  In this case we will
11537	     eliminate the reference to A.
11538
11539	     In both cases, we must search to see if we can find a previous
11540	     use of A and put the death note there.  */
11541
11542	  if (from_insn
11543	      && GET_CODE (from_insn) == CALL_INSN
11544              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11545	    place = from_insn;
11546	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
11547	    place = i3;
11548	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
11549		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11550	    place = i2;
11551
11552	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11553	    break;
11554
11555	  /* If the register is used in both I2 and I3 and it dies in I3,
11556	     we might have added another reference to it.  If reg_n_refs
11557	     was 2, bump it to 3.  This has to be correct since the
11558	     register must have been set somewhere.  This is done
11559	     because local-alloc.c treats 2 references as a
11560	     special case.  */
11561
11562	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
11563	      && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
11564	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11565	    REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
11566
11567	  if (place == 0)
11568	    {
11569	      for (tem = prev_nonnote_insn (i3);
11570		   place == 0 && tem
11571		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
11572		   tem = prev_nonnote_insn (tem))
11573		{
11574		  /* If the register is being set at TEM, see if that is all
11575		     TEM is doing.  If so, delete TEM.  Otherwise, make this
11576		     into a REG_UNUSED note instead.  */
11577		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11578		    {
11579		      rtx set = single_set (tem);
11580		      rtx inner_dest = 0;
11581
11582		      if (set != 0)
11583			for (inner_dest = SET_DEST (set);
11584			     GET_CODE (inner_dest) == STRICT_LOW_PART
11585			     || GET_CODE (inner_dest) == SUBREG
11586			     || GET_CODE (inner_dest) == ZERO_EXTRACT;
11587			     inner_dest = XEXP (inner_dest, 0))
11588			  ;
11589
11590		      /* Verify that it was the set, and not a clobber that
11591			 modified the register.  */
11592
11593		      if (set != 0 && ! side_effects_p (SET_SRC (set))
11594			  && rtx_equal_p (XEXP (note, 0), inner_dest))
11595			{
11596			  /* Move the notes and links of TEM elsewhere.
11597			     This might delete other dead insns recursively.
11598			     First set the pattern to something that won't use
11599			     any register.  */
11600
11601			  PATTERN (tem) = pc_rtx;
11602
11603			  distribute_notes (REG_NOTES (tem), tem, tem,
11604					    NULL_RTX, NULL_RTX, NULL_RTX);
11605			  distribute_links (LOG_LINKS (tem));
11606
11607			  PUT_CODE (tem, NOTE);
11608			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11609			  NOTE_SOURCE_FILE (tem) = 0;
11610			}
11611		      /* If the register is both set and used here, put the
11612			 REG_DEAD note here, but place a REG_UNUSED note
11613			 here too unless there already is one.  */
11614		      else if (reg_referenced_p (XEXP (note, 0),
11615						 PATTERN (tem)))
11616			{
11617			  place = tem;
11618
11619			  if (! find_regno_note (tem, REG_UNUSED,
11620						 REGNO (XEXP (note, 0))))
11621			    REG_NOTES (tem)
11622			      = gen_rtx_EXPR_LIST (REG_UNUSED,
11623						   XEXP (note, 0),
11624						   REG_NOTES (tem));
11625			}
11626		      else
11627			{
11628			  PUT_REG_NOTE_KIND (note, REG_UNUSED);
11629
11630			  /* If there isn't already a REG_UNUSED note, put one
11631			      here.  */
11632			  if (! find_regno_note (tem, REG_UNUSED,
11633						 REGNO (XEXP (note, 0))))
11634			    place = tem;
11635			  break;
11636			}
11637		    }
11638		else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11639			 || (GET_CODE (tem) == CALL_INSN
11640			     && find_reg_fusage (tem, USE, XEXP (note, 0))))
11641		  {
11642		    place = tem;
11643
11644		    /* If we are doing a 3->2 combination, and we have a
11645		       register which formerly died in i3 and was not used
11646		       by i2, which now no longer dies in i3 and is used in
11647		       i2 but does not die in i2, and place is between i2
11648		       and i3, then we may need to move a link from place to
11649		       i2.  */
11650		    if (i2 && INSN_UID (place) <= max_uid_cuid
11651			&& INSN_CUID (place) > INSN_CUID (i2)
11652			&& from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11653			&& reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11654		      {
11655			rtx links = LOG_LINKS (place);
11656			LOG_LINKS (place) = 0;
11657			distribute_links (links);
11658		      }
11659		    break;
11660		  }
11661		}
11662
11663	      /* If we haven't found an insn for the death note and it
11664		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
11665		 insert a USE insn for the register at that label and
11666		 put the death node there.  This prevents problems with
11667		 put the death note there.  This prevents problems with
11668	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
11669		{
11670		  place
11671		    = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
11672				       tem);
11673
11674		  /* If this insn was emitted between blocks, then update
11675		     basic_block_head of the current block to include it.  */
11676		  if (basic_block_end[this_basic_block - 1] == tem)
11677		    basic_block_head[this_basic_block] = place;
11678		}
11679	    }
11680
11681	  /* If the register is set or already dead at PLACE, we needn't do
11682	     anything with this note if it is still a REG_DEAD note.
11683	     We can get here if it is set at all, not only if it is totally
11684	     replaced, which is what `dead_or_set_p' checks, so also check
11685	     for it being set partially.  */
11686	
11688	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
11689	    {
11690	      int regno = REGNO (XEXP (note, 0));
11691
11692	      if (dead_or_set_p (place, XEXP (note, 0))
11693		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11694		{
11695		  /* Unless the register previously died in PLACE, clear
11696		     reg_last_death.  [I no longer understand why this is
11697		     being done.] */
11698		  if (reg_last_death[regno] != place)
11699		    reg_last_death[regno] = 0;
11700		  place = 0;
11701		}
11702	      else
11703		reg_last_death[regno] = place;
11704
11705	      /* If this is a death note for a hard reg that is occupying
11706		 multiple registers, ensure that we are still using all
11707		 parts of the object.  If we find a piece of the object
11708		 that is unused, we must add a USE for that piece before
11709		 PLACE and put the appropriate REG_DEAD note on it.
11710
11711		 An alternative would be to put a REG_UNUSED for the pieces
11712		 on the insn that set the register, but that can't be done if
11713		 it is not in the same block.  It is simpler, though less
11714		 efficient, to add the USE insns.  */
11715
11716	      if (place && regno < FIRST_PSEUDO_REGISTER
11717		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11718		{
11719		  int endregno
11720		    = regno + HARD_REGNO_NREGS (regno,
11721						GET_MODE (XEXP (note, 0)));
11722		  int all_used = 1;
11723		  int i;
11724
11725		  for (i = regno; i < endregno; i++)
11726		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11727			&& ! find_regno_fusage (place, USE, i))
11728		      {
11729			rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
11730			rtx p;
11731
11732			/* See if we already placed a USE note for this
11733			   register in front of PLACE.  */
11734			for (p = place;
11735			     GET_CODE (PREV_INSN (p)) == INSN
11736			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11737			     p = PREV_INSN (p))
11738			  if (rtx_equal_p (piece,
11739					   XEXP (PATTERN (PREV_INSN (p)), 0)))
11740			    {
11741			      p = 0;
11742			      break;
11743			    }
11744
11745			if (p)
11746			  {
11747			    rtx use_insn
11748			      = emit_insn_before (gen_rtx_USE (VOIDmode,
11749							       piece),
11750						  p);
11751			    REG_NOTES (use_insn)
11752			      = gen_rtx_EXPR_LIST (REG_DEAD, piece,
11753						   REG_NOTES (use_insn));
11754			  }
11755
11756			all_used = 0;
11757		      }
11758
11759		  /* Check for the case where the register dying partially
11760		     overlaps the register set by this insn.  */
11761		  if (all_used)
11762		    for (i = regno; i < endregno; i++)
11763		      if (dead_or_set_regno_p (place, i))
11764			{
11765			  all_used = 0;
11766			  break;
11767			}
11768
11769		  if (! all_used)
11770		    {
11771		      /* Put only REG_DEAD notes for pieces that are
11772			 still used and that are not already dead or set.  */
11773
11774		      for (i = regno; i < endregno; i++)
11775			{
11776			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
11777
11778			  if ((reg_referenced_p (piece, PATTERN (place))
11779			       || (GET_CODE (place) == CALL_INSN
11780				   && find_reg_fusage (place, USE, piece)))
11781			      && ! dead_or_set_p (place, piece)
11782			      && ! reg_bitfield_target_p (piece,
11783							  PATTERN (place)))
11784			    REG_NOTES (place)
11785			      = gen_rtx_EXPR_LIST (REG_DEAD,
11786						   piece, REG_NOTES (place));
11787			}
11788
11789		      place = 0;
11790		    }
11791		}
11792	    }
11793	  break;
11794
11795	default:
11796	  /* Any other notes should not be present at this point in the
11797	     compilation.  */
11798	  abort ();
11799	}
11800
11801      if (place)
11802	{
11803	  XEXP (note, 1) = REG_NOTES (place);
11804	  REG_NOTES (place) = note;
11805	}
11806      else if ((REG_NOTE_KIND (note) == REG_DEAD
11807		|| REG_NOTE_KIND (note) == REG_UNUSED)
11808	       && GET_CODE (XEXP (note, 0)) == REG)
11809	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
11810
11811      if (place2)
11812	{
11813	  if ((REG_NOTE_KIND (note) == REG_DEAD
11814	       || REG_NOTE_KIND (note) == REG_UNUSED)
11815	      && GET_CODE (XEXP (note, 0)) == REG)
11816	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
11817
11818	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
11819					       REG_NOTE_KIND (note),
11820					       XEXP (note, 0),
11821					       REG_NOTES (place2));
11822	}
11823    }
11824}
11825
11826/* Similarly to above, distribute the LOG_LINKS that used to be present on
11827   I3, I2, and I1 to new locations.  This is also called in one case to
11828   add a link pointing at I3 when I3's destination is changed.  */
11829
11830static void
11831distribute_links (links)
11832     rtx links;
11833{
11834  rtx link, next_link;
11835
11836  for (link = links; link; link = next_link)
11837    {
11838      rtx place = 0;
11839      rtx insn;
11840      rtx set, reg;
11841
11842      next_link = XEXP (link, 1);
11843
11844      /* If the insn that this link points to is a NOTE or isn't a single
11845	 set, ignore it.  In the latter case, it isn't clear what we
11846	 can do other than ignore the link, since we can't tell which
11847	 register it was for.  Such links wouldn't be used by combine
11848	 anyway.
11849
11850	 It is not possible for the destination of the target of the link to
11851	 have been changed by combine.  The only way this could happen is if
11852	 we replaced I3, I2, and I1 by I3 and I2.  But in that case the
11853	 destination of I2 also remains unchanged.  */
11854
11855      if (GET_CODE (XEXP (link, 0)) == NOTE
11856	  || (set = single_set (XEXP (link, 0))) == 0)
11857	continue;
11858
11859      reg = SET_DEST (set);
11860      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11861	     || GET_CODE (reg) == SIGN_EXTRACT
11862	     || GET_CODE (reg) == STRICT_LOW_PART)
11863	reg = XEXP (reg, 0);
11864
11865      /* A LOG_LINK is defined as being placed on the first insn that uses
11866	 a register and points to the insn that sets the register.  Start
11867	 searching at the next insn after the target of the link and stop
11868	 when we reach a set of the register or the end of the basic block.
11869
11870	 Note that this correctly handles the link that used to point from
11871	 I3 to I2.  Also note that not much searching is typically done here
11872	 since most links don't point very far away.  */
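      /* A hypothetical illustration: if insn 10 sets r65 and the link
	 being redistributed points at insn 10, the loop below walks forward
	 from insn 11 and attaches the link to the first insn that uses r65,
	 stopping early if r65 is set again or the basic block ends.  */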
11873
11874      for (insn = NEXT_INSN (XEXP (link, 0));
11875	   (insn && (this_basic_block == n_basic_blocks - 1
11876		     || basic_block_head[this_basic_block + 1] != insn));
11877	   insn = NEXT_INSN (insn))
11878	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11879	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11880	  {
11881	    if (reg_referenced_p (reg, PATTERN (insn)))
11882	      place = insn;
11883	    break;
11884	  }
11885	else if (GET_CODE (insn) == CALL_INSN
11886		 && find_reg_fusage (insn, USE, reg))
11887	  {
11888	    place = insn;
11889	    break;
11890	  }
11891
11892      /* If we found a place to put the link, place it there unless there
11893	 is already a link to the same insn as LINK at that point.  */
11894
11895      if (place)
11896	{
11897	  rtx link2;
11898
11899	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11900	    if (XEXP (link2, 0) == XEXP (link, 0))
11901	      break;
11902
11903	  if (link2 == 0)
11904	    {
11905	      XEXP (link, 1) = LOG_LINKS (place);
11906	      LOG_LINKS (place) = link;
11907
11908	      /* Set added_links_insn to the earliest insn we added a
11909		 link to.  */
11910	      if (added_links_insn == 0
11911		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
11912		added_links_insn = place;
11913	    }
11914	}
11915    }
11916}
11917
11918/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
11919
11920static int
11921insn_cuid (insn)
11922     rtx insn;
11923{
11924  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11925	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11926    insn = NEXT_INSN (insn);
11927
11928  if (INSN_UID (insn) > max_uid_cuid)
11929    abort ();
11930
11931  return INSN_CUID (insn);
11932}
11933
11934void
11935dump_combine_stats (file)
11936     FILE *file;
11937{
11938  fprintf
11939    (file,
11940     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11941     combine_attempts, combine_merges, combine_extras, combine_successes);
11942}
11943
11944void
11945dump_combine_total_stats (file)
11946     FILE *file;
11947{
11948  fprintf
11949    (file,
11950     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11951     total_attempts, total_merges, total_extras, total_successes);
11952}
11953