combine.c revision 110611
/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

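   For instance (an editorial sketch, not from any particular target),
   given

	A: (set (reg 100) (reg 1))
	B: (set (reg 101) (plus (reg 100) (const_int 4)))
	C: (set (mem (reg 101)) (const_int 0))

   C links back to B and B links back to A, so we may substitute B into
   C and then A into the result, yielding a single insn such as

	(set (mem (plus (reg 1) (const_int 4))) (const_int 0))

   which is installed only if the target recognizes it.
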
   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;


/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
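
/* An editorial illustration of the workaround above: if BITS_PER_WORD
   and HOST_BITS_PER_WIDE_INT are both 64, a direct `(val) << 64' would
   be undefined in C, while the two-step form is well defined and yields
   0, the value the full shift would produce under modular arithmetic.  */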

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

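/* For example (an editorial illustration): if register 100 was last set
   by (set (reg:SI 100) (zero_extend:SI (mem:QI ...))), the recorded form
   shows that its upper 24 bits are zero, so a later
   (and:SI (reg:SI 100) (const_int 255)) is redundant, provided the entry
   is still valid under the rules above.  */
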
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; unsigned int i;} old_contents;
  union {rtx *r; unsigned int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((unsigned int *,
						 unsigned int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p	PARAMS ((rtx));
static int can_combine_p	PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p	PARAMS ((rtx));
static int combinable_i3pat	PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv	PARAMS ((rtx));
static rtx try_combine		PARAMS ((rtx, rtx, rtx, int *));
static void undo_all		PARAMS ((void));
static void undo_commit		PARAMS ((void));
static rtx *find_split_point	PARAMS ((rtx *, rtx));
static rtx subst		PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx	PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PARAMS ((rtx));
static rtx simplify_set		PARAMS ((rtx));
static rtx simplify_logical	PARAMS ((rtx, int));
static rtx expand_compound_operation  PARAMS ((rtx));
static rtx expand_field_assignment  PARAMS ((rtx));
static rtx make_extraction	PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
					 rtx, unsigned HOST_WIDE_INT, int,
					 int, int));
static rtx extract_left_shift	PARAMS ((rtx, int));
static rtx make_compound_operation  PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask	PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT *));
static rtx force_to_mode	PARAMS ((rtx, enum machine_mode,
					 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond		PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment  PARAMS ((rtx));
static rtx apply_distributive_law  PARAMS ((rtx));
static rtx simplify_and_const_int  PARAMS ((rtx, enum machine_mode, rtx,
					    unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops	PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
					 enum rtx_code, HOST_WIDE_INT,
					 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
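
/* A typical use of SUBST, sketched for illustration only (actual call
   sites vary): replace one operand of X in place, recording the old
   contents so undo_all can restore them if the combination fails:

	SUBST (XEXP (x, 0), gen_lowpart_for_combine (mode, inner));

   Here `x', `mode' and `inner' are hypothetical locals.  */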

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     unsigned int *into, newval;
{
  struct undo *buf;
  unsigned int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return non-zero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (INSN_P (insn))
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0),
				     NULL_RTX, &new_direct_jump_p)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    {
	      rtx link = XEXP (links, 0);

	      /* If the linked insn has been replaced by a note, then there
		 is no point in pursuing this chain any further.  */
	      if (GET_CODE (link) == NOTE)
		continue;

	      for (nextlinks = LOG_LINKS (link);
		   nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, link,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev,
				       NULL_RTX, &new_direct_jump_p)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0),
					 &new_direct_jump_p)) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0),
					prev, &new_direct_jump_p)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0),
				       &new_direct_jump_p)) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  delete_noop_moves (f);

  if (need_refresh)
    {
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
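
/* For instance (an editorial illustration): on a machine where byte
   loads zero extend, a pseudo set everywhere by
   (set (reg:SI 100) (zero_extend:SI (mem:QI ...))) ends up with
   reg_nonzero_bits[100] == 0xff and at least 24 sign bit copies.  */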
848168404Spjd
849185029Spjdstatic void
850168404Spjdset_nonzero_bits_and_sign_copies (x, set, data)
851209962Smm     rtx x;
852209962Smm     rtx set;
853185029Spjd     void *data ATTRIBUTE_UNUSED;
854209962Smm{
855185029Spjd  unsigned int num;
856185029Spjd
857185029Spjd  if (GET_CODE (x) == REG
858185029Spjd      && REGNO (x) >= FIRST_PSEUDO_REGISTER
859185029Spjd      /* If this register is undefined at the start of the file, we can't
860185029Spjd	 say what its contents were.  */
861185029Spjd      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
862168404Spjd      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
863168404Spjd    {
864185029Spjd      if (set == 0 || GET_CODE (set) == CLOBBER)
865185029Spjd	{
866185029Spjd	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
867185029Spjd	  reg_sign_bit_copies[REGNO (x)] = 1;
868185029Spjd	  return;
869185029Spjd	}
870185029Spjd
871185029Spjd      /* If this is a complex assignment, see if we can convert it into a
872168404Spjd	 simple assignment.  */
873168404Spjd      set = expand_field_assignment (set);
874168404Spjd
875168404Spjd      /* If this is a simple assignment, or we have a paradoxical SUBREG,
876168404Spjd	 set what we know about X.  */
877168404Spjd
878168404Spjd      if (SET_DEST (set) == x
879168404Spjd	  || (GET_CODE (SET_DEST (set)) == SUBREG
880209962Smm	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
881209962Smm		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
882209962Smm	      && SUBREG_REG (SET_DEST (set)) == x))
883209962Smm	{
884185029Spjd	  rtx src = SET_SRC (set);
885185029Spjd
886185029Spjd#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
887185029Spjd	  /* If X is narrower than a word and SRC is a non-negative
888185029Spjd	     constant that would appear negative in the mode of X,
889185029Spjd	     sign-extend it for use in reg_nonzero_bits because some
890168404Spjd	     machines (maybe most) will actually do the sign-extension
891209962Smm	     and this is the conservative approach.
892209962Smm
893209962Smm	     ??? For 2.5, try to tighten up the MD files in this regard
894209962Smm	     instead of this kludge.  */
895209962Smm
896209962Smm	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
897209962Smm	      && GET_CODE (src) == CONST_INT
898209962Smm	      && INTVAL (src) > 0
899209962Smm	      && 0 != (INTVAL (src)
900209962Smm		       & ((HOST_WIDE_INT) 1
901209962Smm			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
902209962Smm	    src = GEN_INT (INTVAL (src)
903209962Smm			   | ((HOST_WIDE_INT) (-1)
904209962Smm			      << GET_MODE_BITSIZE (GET_MODE (x))));
905209962Smm#endif
906209962Smm
907209962Smm	  /* Don't call nonzero_bits if it cannot change anything.  */
908209962Smm	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
909209962Smm	    reg_nonzero_bits[REGNO (x)]
910209962Smm	      |= nonzero_bits (src, nonzero_bits_mode);
911209962Smm	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
912209962Smm	  if (reg_sign_bit_copies[REGNO (x)] == 0
913209962Smm	      || reg_sign_bit_copies[REGNO (x)] > num)
914209962Smm	    reg_sign_bit_copies[REGNO (x)] = num;
915209962Smm	}
916209962Smm      else
917209962Smm	{
918209962Smm	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
919209962Smm	  reg_sign_bit_copies[REGNO (x)] = 1;
920209962Smm	}
921209962Smm    }
922209962Smm}
923209962Smm
924209962Smm/* See if INSN can be combined into I3.  PRED and SUCC are optionally
925209962Smm   insns that were previously combined into I3 or that will be combined
926209962Smm   into the merger of INSN and I3.
927209962Smm
928209962Smm   Return 0 if the combination is not allowed for any reason.
929209962Smm
930209962Smm   If the combination is allowed, *PDEST will be set to the single
931209962Smm   destination of INSN and *PSRC to the single source, and this function
932209962Smm   will return 1.  */
933209962Smm
934209962Smmstatic int
935209962Smmcan_combine_p (insn, i3, pred, succ, pdest, psrc)
936209962Smm     rtx insn;
937209962Smm     rtx i3;
938209962Smm     rtx pred ATTRIBUTE_UNUSED;
939209962Smm     rtx succ;
940209962Smm     rtx *pdest, *psrc;
941209962Smm{
942209962Smm  int i;
943209962Smm  rtx set = 0, src, dest;
944219089Spjd  rtx p;
945219089Spjd#ifdef AUTO_INC_DEC
946219089Spjd  rtx link;
947219089Spjd#endif
948219089Spjd  int all_adjacent = (succ ? (next_active_insn (insn) == succ
949209962Smm			      && next_active_insn (succ) == i3)
950219089Spjd		      : next_active_insn (insn) == i3);
951219089Spjd
952209962Smm  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
953209962Smm     or a PARALLEL consisting of such a SET and CLOBBERs.
954209962Smm
955209962Smm     If INSN has CLOBBER parallel parts, ignore them for our processing.
956209962Smm     By definition, these happen during the execution of the insn.  When it
957209962Smm     is merged with another insn, all bets are off.  If they are, in fact,
958209962Smm     needed and aren't also supplied in I3, they may be added by
959209962Smm     recog_for_combine.  Otherwise, it won't match.
960209962Smm
961228103Smm     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
962228103Smm     note.
963228103Smm
964228103Smm     Get the source and destination of INSN.  If more than one, can't
965228103Smm     combine.  */
966228103Smm
967209962Smm  if (GET_CODE (PATTERN (insn)) == SET)
968209962Smm    set = PATTERN (insn);
969209962Smm  else if (GET_CODE (PATTERN (insn)) == PARALLEL
970209962Smm	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
971209962Smm    {
972209962Smm      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
973209962Smm	{
974209962Smm	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
975209962Smm
976168404Spjd	  switch (GET_CODE (elt))
977168404Spjd	    {
978168404Spjd	    /* This is important to combine floating point insns
979168404Spjd	       for the SH4 port.  */
980168404Spjd	    case USE:
981168404Spjd	      /* Combining an isolated USE doesn't make sense.
982168404Spjd		 We depend here on combinable_i3pat to reject them.  */
983168404Spjd	      /* The code below this loop only verifies that the inputs of
984168404Spjd		 the SET in INSN do not change.  We call reg_set_between_p
985185029Spjd		 to verify that the REG in the USE does not change between
986168404Spjd		 I3 and INSN.
987168404Spjd		 If the USE in INSN was for a pseudo register, the matching
988168404Spjd		 insn pattern will likely match any register; combining this
989168404Spjd		 with any other USE would only be safe if we knew that the
990168404Spjd		 used registers have identical values, or if there was
991168404Spjd		 something to tell them apart, e.g. different modes.  For
992168404Spjd		 now, we forgo such complicated tests and simply disallow
993185029Spjd		 combining of USES of pseudo registers with any other USE.  */
994185029Spjd	      if (GET_CODE (XEXP (elt, 0)) == REG
995185029Spjd		  && GET_CODE (PATTERN (i3)) == PARALLEL)
996185029Spjd		{
997168404Spjd		  rtx i3pat = PATTERN (i3);
998185029Spjd		  int i = XVECLEN (i3pat, 0) - 1;
999168404Spjd		  unsigned int regno = REGNO (XEXP (elt, 0));
1000185029Spjd
1001185029Spjd		  do
1002185029Spjd		    {
1003185029Spjd		      rtx i3elt = XVECEXP (i3pat, 0, i);
1004168404Spjd
1005185029Spjd		      if (GET_CODE (i3elt) == USE
1006185029Spjd			  && GET_CODE (XEXP (i3elt, 0)) == REG
1007185029Spjd			  && (REGNO (XEXP (i3elt, 0)) == regno
1008185029Spjd			      ? reg_set_between_p (XEXP (elt, 0),
1009168404Spjd						   PREV_INSN (insn), i3)
1010185029Spjd			      : regno >= FIRST_PSEUDO_REGISTER))
1011185029Spjd			return 0;
1012168404Spjd		    }
1013168404Spjd		  while (--i >= 0);
1014168404Spjd		}
1015168404Spjd	      break;
1016168404Spjd
1017168404Spjd	      /* We can ignore CLOBBERs.  */
1018168404Spjd	    case CLOBBER:
1019168404Spjd	      break;
1020168404Spjd
1021168404Spjd	    case SET:
1022168404Spjd	      /* Ignore SETs whose result isn't used but not those that
1023168404Spjd		 have side-effects.  */
1024168404Spjd	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1025168404Spjd		  && ! side_effects_p (elt))
1026168404Spjd		break;
1027168404Spjd
1028168404Spjd	      /* If we have already found a SET, this is a second one and
1029168404Spjd		 so we cannot combine with this insn.  */
1030168404Spjd	      if (set)
1031168404Spjd		return 0;
1032168404Spjd
1033219089Spjd	      set = elt;
1034219089Spjd	      break;
1035219089Spjd
1036219089Spjd	    default:
1037219089Spjd	      /* Anything else means we can't combine.  */
1038219089Spjd	      return 0;
1039219089Spjd	    }
1040219089Spjd	}
1041219089Spjd
1042219089Spjd      if (set == 0
1043219089Spjd	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1044219089Spjd	     so don't do anything with it.  */
1045219089Spjd	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1046219089Spjd	return 0;
1047219089Spjd    }
1048219089Spjd  else
1049219089Spjd    return 0;
1050219089Spjd
1051219089Spjd  if (set == 0)
1052219089Spjd    return 0;
1053168404Spjd
1054168404Spjd  set = expand_field_assignment (set);
1055219089Spjd  src = SET_SRC (set), dest = SET_DEST (set);
1056219089Spjd
1057219089Spjd  /* Don't eliminate a store in the stack pointer.  */
1058219089Spjd  if (dest == stack_pointer_rtx
1059219089Spjd      /* If we couldn't eliminate a field assignment, we can't combine.  */
1060219089Spjd      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
1061219089Spjd      /* Don't combine with an insn that sets a register to itself if it has
1062219089Spjd	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
1063219089Spjd      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1064219089Spjd      /* Can't merge an ASM_OPERANDS.  */
1065219089Spjd      || GET_CODE (src) == ASM_OPERANDS
1066219089Spjd      /* Can't merge a function call.  */
1067219089Spjd      || GET_CODE (src) == CALL
1068219089Spjd      /* Don't eliminate a function call argument.  */
1069219089Spjd      || (GET_CODE (i3) == CALL_INSN
1070219089Spjd	  && (find_reg_fusage (i3, USE, dest)
1071219089Spjd	      || (GET_CODE (dest) == REG
1072219089Spjd		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1073219089Spjd		  && global_regs[REGNO (dest)])))
1074219089Spjd      /* Don't substitute into an incremented register.  */
1075219089Spjd      || FIND_REG_INC_NOTE (i3, dest)
1076219089Spjd      || (succ && FIND_REG_INC_NOTE (succ, dest))
1077168404Spjd#if 0
1078168404Spjd      /* Don't combine the end of a libcall into anything.  */
1079219089Spjd      /* ??? This gives worse code, and appears to be unnecessary, since no
1080219089Spjd	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
1081219089Spjd	 use REG_RETVAL notes for noconflict blocks, but other code here
1082219089Spjd	 makes sure that those insns don't disappear.  */
1083219089Spjd      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1084219089Spjd#endif
1085219089Spjd      /* Make sure that DEST is not used after SUCC but before I3.  */
1086219089Spjd      || (succ && ! all_adjacent
1087219089Spjd	  && reg_used_between_p (dest, succ, i3))
1088219089Spjd      /* Make sure that the value that is to be substituted for the register
1089219089Spjd	 does not use any registers whose values alter in between.  However,
1090219089Spjd	 If the insns are adjacent, a use can't cross a set even though we
1091219089Spjd	 think it might (this can happen for a sequence of insns each setting
1092219089Spjd	 the same destination; reg_last_set of that register might point to
1093168404Spjd	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
1094185029Spjd	 equivalent to the memory so the substitution is valid even if there
1095185029Spjd	 are intervening stores.  Also, don't move a volatile asm or
1096185029Spjd	 UNSPEC_VOLATILE across any other insns.  */
1097168404Spjd      || (! all_adjacent
1098168404Spjd	  && (((GET_CODE (src) != MEM
1099168404Spjd		|| ! find_reg_note (insn, REG_EQUIV, src))
1100168404Spjd	       && use_crosses_set_p (src, INSN_CUID (insn)))
1101185029Spjd	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1102185029Spjd	      || GET_CODE (src) == UNSPEC_VOLATILE))
1103185029Spjd      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1104185029Spjd	 better register allocation by not doing the combine.  */
1105185029Spjd      || find_reg_note (i3, REG_NO_CONFLICT, dest)
1106185029Spjd      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1107185029Spjd      /* Don't combine across a CALL_INSN, because that would possibly
1108185029Spjd	 change whether the life span of some REGs crosses calls or not,
1109185029Spjd	 and it is a pain to update that information.
1110185029Spjd	 Exception: if source is a constant, moving it later can't hurt.
1111185029Spjd	 Accept that special case, because it helps -fforce-addr a lot.  */
1112185029Spjd      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1113185029Spjd    return 0;
1114185029Spjd
1115185029Spjd  /* DEST must either be a REG or CC0.  */
1116168404Spjd  if (GET_CODE (dest) == REG)
1117168404Spjd    {
1118168404Spjd      /* If register alignment is being enforced for multi-word items in all
1119185029Spjd	 cases except for parameters, it is possible to have a register copy
1120185029Spjd	 insn referencing a hard register that is not allowed to contain the
1121168404Spjd	 mode being copied and which would not be valid as an operand of most
1122168404Spjd	 insns.  Eliminate this problem by not combining with such an insn.
1123185029Spjd
1124168404Spjd	 Also, on some machines we don't want to extend the life of a hard
1125168404Spjd	 register.  */
1126185029Spjd
1127185029Spjd      if (GET_CODE (src) == REG
1128185029Spjd	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1129168404Spjd	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1130168404Spjd	      /* Don't extend the life of a hard register unless it is
1131168404Spjd		 user variable (if we have few registers) or it can't
1132168404Spjd		 fit into the desired register (meaning something special
1133168404Spjd		 is going on).
1134168404Spjd		 Also avoid substituting a return register into I3, because
1135185029Spjd		 reload can't handle a conflict with constraints of other
1136168404Spjd		 inputs.  */
1137168404Spjd	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
1138168404Spjd		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1139185029Spjd	return 0;
1140168404Spjd    }
1141168404Spjd  else if (GET_CODE (dest) != CC0)
1142168404Spjd    return 0;
1143168404Spjd
1144168404Spjd  /* Don't substitute for a register intended as a clobberable operand.
1145168404Spjd     Similarly, don't substitute an expression containing a register that
1146168404Spjd     will be clobbered in I3.  */
1147168404Spjd  if (GET_CODE (PATTERN (i3)) == PARALLEL)
1148168404Spjd    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1149168404Spjd      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
1150185029Spjd	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1151185029Spjd				       src)
1152168404Spjd	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
1153168404Spjd	return 0;
1154168404Spjd
1155168404Spjd  /* If INSN contains anything volatile, or is an `asm' (whether volatile
1156168404Spjd     or not), reject, unless nothing volatile comes between it and I3 */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* Check whether PAT is an insn, or part of one, used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, since
   doing so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1DEST in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

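      /* SUBST records each replacement in the undo buffer, so undo_all
	 can revert it if this combination ends up being rejected.  */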
      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
	           (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

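  /* For a PARALLEL, every element must be combinable on its own.  */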
  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
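	/* Class 'c' is a commutative binary operator, '<' a comparison,
	   '2' any other binary operator, and '1' a unary operator;
	   recurse into each operand.  */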
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (insn)
     rtx insn;
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs.  The register
     allocator can usually handle such reg-reg moves by tying.  If we allow
     the combiner to make substitutions of hard regs, we risk aborting in
     reload on machines that have SMALL_REGISTER_CLASSES.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
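  /* Look through SUBREGs so that copies hidden inside a SUBREG are
     treated like plain register copies.  */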
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)])
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)])))
    return 1;

  return 0;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a non-zero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (i3, i2, i1, new_direct_jump_p)
     rtx i3, i2, i1;
     int *new_direct_jump_p;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  rtx link;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      /* We also can't do anything if I3 has a
	 REG_LIBCALL note since we don't want to disrupt the contiguity of a
	 libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBER or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

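      /* The scan above found no conflicting use, so look for the SET in I2
	 whose destination is I3's source and substitute into it.  */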
      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
     one of those words to another constant, merge them by making a new
     constant.  */
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (GET_CODE (SET_SRC (temp)) == CONST_INT
	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (SET_DEST (temp)) == REG
      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
      && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
    {
      HOST_WIDE_INT lo, hi;

      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
      else
	{
	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
	}

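      /* Fold I3's constant into whichever word of the old value I3
	 overwrites; LO holds the low-order host word and HI the
	 high-order one.  */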
      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
	{
	  /* We don't handle the case of the target word being wider
	     than a host wide int.  */
	  if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
	    abort ();

	  lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
	  lo |= (INTVAL (SET_SRC (PATTERN (i3)))
		 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
	}
      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
	hi = INTVAL (SET_SRC (PATTERN (i3)));
      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
	{
	  int sign = -(int) ((unsigned HOST_WIDE_INT) lo
			     >> (HOST_BITS_PER_WIDE_INT - 1));

	  lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
		   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
	  lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
		 (INTVAL (SET_SRC (PATTERN (i3)))));
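	  /* If HI was just the sign extension of the old LO, recompute it
	     from the new LO.  */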
	  if (hi == sign)
	    hi = lo < 0 ? -1 : 0;
	}
      else
	/* We don't handle the case of the higher word not fitting
	   entirely in either hi or lo.  */
	abort ();

      combine_merges++;
      subst_insn = i3;
      subst_low_cuid = INSN_CUID (i2);
      added_sets_2 = added_sets_1 = 0;
      i2dest = SET_DEST (temp);

      SUBST (SET_SRC (temp),
	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));

      newpat = PATTERN (i2);
      goto validate_replacement;
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

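      /* I == 1 here means everything past the first two elements was a
	 CLOBBER, so I2 has exactly the shape shown above.  */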
      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  subst_prev_insn = i1
	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
			    NULL_RTX);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.   Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef EXTRA_CC_MODES
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  unsigned int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, (rtx*) 0))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
		  sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

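      /* TOTAL_SETS counts down from the end of the vector, so the added
	 SETs land after the elements copied above.  */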
      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.   This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
				   REGNO (i2dest));

	  m_split = split_insns (gen_rtx_PARALLEL
				 (VOIDmode,
				  gen_rtvec (2, newpat,
					     gen_rtx_CLOBBER (VOIDmode,
							      ni2dest))),
				 i3);
	  /* If the split with the mode-changed register didn't work, try
	     the original register.  */
	  if (! m_split && ni2dest != i2dest)
	    {
	      ni2dest = i2dest;
	      m_split = split_insns (gen_rtx_PARALLEL
				     (VOIDmode,
				      gen_rtvec (2, newpat,
						 gen_rtx_CLOBBER (VOIDmode,
								  i2dest))),
				     i3);
	    }
	}

      /* If we've split a jump pattern, we'll wind up with a sequence even
	 with one instruction.  We can handle that below, so extract it.  */
      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 1)
	m_split = PATTERN (XVECEXP (m_split, 0, 0));

      if (m_split && GET_CODE (m_split) != SEQUENCE)
	{
	  insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
	  if (insn_code_number >= 0)
	    newpat = m_split;
	}
      else if (m_split && GET_CODE (m_split) == SEQUENCE
	       && XVECLEN (m_split, 0) == 2
	       && (next_real_insn (i2) == i3
		   || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
					   INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  If I2's destination
	     is used between I2 and I3, we also can't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set
	      && (next_real_insn (i2) == i3
		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
	    insn_code_number = recog_for_combine (&newi3pat, i3,
						  &new_i3_notes);
	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0)
	    {
	      rtx new_i3_dest = SET_DEST (i3set);
	      rtx new_i2_dest = SET_DEST (i2set);

	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i3_dest) == SUBREG)
		new_i3_dest = XEXP (new_i3_dest, 0);

	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i2_dest) == SUBREG)
		new_i2_dest = XEXP (new_i2_dest, 0);

	      if (GET_CODE (new_i3_dest) == REG
		  && GET_CODE (new_i2_dest) == REG
		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
		REG_N_SETS (REGNO (new_i2_dest))++;
	    }
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
	  && GET_CODE (i2dest) == REG
#endif
	  /* We need I2DEST in the proper mode.  If it is a hard register
	     or the only use of a pseudo, we can change its mode.  */
	  && (GET_MODE (*split) == GET_MODE (i2dest)
	      || GET_MODE (*split) == VOIDmode
	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (i2dest)))
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
	  /* We can't overwrite I2DEST if its value is still used by
	     NEWPAT.  */
	  && ! reg_referenced_p (i2dest, newpat))
	{
	  rtx newdest = i2dest;
	  enum rtx_code split_code = GET_CODE (*split);
	  enum machine_mode split_mode = GET_MODE (*split);

	  /* Get NEWDEST as a register in the proper mode.  We have already
	     validated that we can do this.  */
	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
	    {
	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));

	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
	    }

	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
	     an ASHIFT.  This can occur if it was inside a PLUS and hence
	     appeared to be a memory address.  This is a kludge.  */
	  if (split_code == MULT
	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
	      && INTVAL (XEXP (*split, 1)) > 0
	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
	    {
	      SUBST (*split, gen_rtx_ASHIFT (split_mode,
					     XEXP (*split, 0), GEN_INT (i)));
	      /* Update split_code because we may not have a multiply
		 anymore.  */
	      split_code = GET_CODE (*split);
	    }

#ifdef INSN_SCHEDULING
	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
	     be written as a ZERO_EXTEND.  */
	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
	    SUBST (*split, gen_rtx_ZERO_EXTEND  (split_mode,
						 SUBREG_REG (*split)));
#endif

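	  /* Make the new I2 set NEWDEST from the split expression, and
	     refer to NEWDEST at the split point within NEWPAT.  */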
	  newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
	  SUBST (*split, newdest);
	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If the split point was a MULT and we didn't have one before,
	     don't use one now.  */
	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
	}
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
		 (GET_CODE (temp) == REG
		  && reg_nonzero_bits[REGNO (temp)] != 0
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		  && (reg_nonzero_bits[REGNO (temp)]
		      != GET_MODE_MASK (word_mode))))
	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
		     (GET_CODE (temp) == REG
		      && reg_nonzero_bits[REGNO (temp)] != 0
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		      && (reg_nonzero_bits[REGNO (temp)]
			  != GET_MODE_MASK (word_mode)))))
	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
					 SET_SRC (XVECEXP (newpat, 0, 1)))
	   && ! find_reg_note (i3, REG_UNUSED,
			       SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
	{
	  rtx insn;
	  rtx link;

	  /* If we will be able to accept this, we have made a change to the
	     destination of I3.  This can invalidate a LOG_LINKS pointing
	     to I3.  No other part of combine.c makes such a transformation.

	     The new I3 will have a destination that was previously the
2372185029Spjd	     destination of I1 or I2 and which was used in I2 or I3.  Call
2373185029Spjd	     distribute_links to make a LOG_LINK from the next use of
2374185029Spjd	     that destination.  */
2375185029Spjd
2376185029Spjd	  PATTERN (i3) = newpat;
2377185029Spjd	  distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
2378185029Spjd
2379185029Spjd	  /* I3 now uses what used to be its destination and which is
2380185029Spjd	     now I2's destination.  That means we need a LOG_LINK from
2381185029Spjd	     I3 to I2.  But we used to have one, so we still will.
2382185029Spjd
2383185029Spjd	     However, some later insn might be using I2's dest and have
2384185029Spjd	     a LOG_LINK pointing at I3.  We must remove this link.
2385185029Spjd	     The simplest way to remove the link is to point it at I1,
2386185029Spjd	     which we know will be a NOTE.  */
2387185029Spjd
2388168404Spjd	  for (insn = NEXT_INSN (i3);
2389168404Spjd	       insn && (this_basic_block == n_basic_blocks - 1
2390168404Spjd			|| insn != BLOCK_HEAD (this_basic_block + 1));
2391168404Spjd	       insn = NEXT_INSN (insn))
2392168404Spjd	    {
2393168404Spjd	      if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
2394168404Spjd		{
2395168404Spjd		  for (link = LOG_LINKS (insn); link;
2396168404Spjd		       link = XEXP (link, 1))
2397168404Spjd		    if (XEXP (link, 0) == i3)
2398168404Spjd		      XEXP (link, 0) = i1;
2399168404Spjd
2400168404Spjd		  break;
2401168404Spjd		}
2402168404Spjd	    }
2403168404Spjd	}
2404168404Spjd    }
2405168404Spjd
2406185029Spjd  /* Similarly, check for a case where we have a PARALLEL of two independent
2407168404Spjd     SETs but we started with three insns.  In this case, we can do the sets
2408168404Spjd     as two separate insns.  This case occurs when some SET allows two
2409168404Spjd     other insns to combine, but the destination of that SET is still live.  */
2410168404Spjd
2411185029Spjd  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2412185029Spjd	   && GET_CODE (newpat) == PARALLEL
2413185029Spjd	   && XVECLEN (newpat, 0) == 2
2414185029Spjd	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2415185029Spjd	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2416209962Smm	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2417185029Spjd	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2418185029Spjd	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2419185029Spjd	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2420168404Spjd	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2421168404Spjd				   INSN_CUID (i2))
2422168404Spjd	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
2423168404Spjd	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2424168404Spjd	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2425185029Spjd	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2426168404Spjd				  XVECEXP (newpat, 0, 0))
2427168404Spjd	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2428168404Spjd				  XVECEXP (newpat, 0, 1))
2429168404Spjd	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
2430168404Spjd		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
2431168404Spjd    {
2432185029Spjd      /* Normally it doesn't matter which of the two SETs is emitted first,
2433168404Spjd	 but it does if one of them references cc0.  In that case, that
2434168404Spjd	 one must come first.  */
2435168404Spjd#ifdef HAVE_cc0
2436185029Spjd      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2437168404Spjd	{
2438168404Spjd	  newi2pat = XVECEXP (newpat, 0, 0);
2439185029Spjd	  newpat = XVECEXP (newpat, 0, 1);
2440168404Spjd	}
2441168404Spjd      else
2442168404Spjd#endif
2443168404Spjd	{
2444168404Spjd	  newi2pat = XVECEXP (newpat, 0, 1);
2445168404Spjd	  newpat = XVECEXP (newpat, 0, 0);
2446168404Spjd	}
2447168404Spjd
2448168404Spjd      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2449209962Smm
2450209962Smm      if (i2_code_number >= 0)
2451209962Smm	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2452209962Smm    }
2453209962Smm
2454209962Smm  /* If it still isn't recognized, fail and change things back the way they
2455209962Smm     were.  */
2456209962Smm  if ((insn_code_number < 0
2457209962Smm       /* Is the result a reasonable ASM_OPERANDS?  */
2458219089Spjd       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2459209962Smm    {
2460209962Smm      undo_all ();
2461209962Smm      return 0;
2462209962Smm    }
2463209962Smm
2464209962Smm  /* If we had to change another insn, make sure it is valid also.  */
2465209962Smm  if (undobuf.other_insn)
2466209962Smm    {
2467209962Smm      rtx other_pat = PATTERN (undobuf.other_insn);
2468209962Smm      rtx new_other_notes;
2469209962Smm      rtx note, next;
2470209962Smm
2471209962Smm      CLEAR_HARD_REG_SET (newpat_used_regs);
2472209962Smm
2473209962Smm      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2474209962Smm					     &new_other_notes);
2475209962Smm
2476209962Smm      if (other_code_number < 0 && ! check_asm_operands (other_pat))
2477209962Smm	{
2478209962Smm	  undo_all ();
2479209962Smm	  return 0;
2480209962Smm	}
2481209962Smm
2482209962Smm      PATTERN (undobuf.other_insn) = other_pat;
2483168404Spjd
2484209962Smm      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2485209962Smm	 are still valid.  Then add any non-duplicate notes added by
2486209962Smm	 recog_for_combine.  */
2487209962Smm      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2488209962Smm	{
2489209962Smm	  next = XEXP (note, 1);
2490209962Smm
2491209962Smm	  if (REG_NOTE_KIND (note) == REG_UNUSED
2492209962Smm	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2493209962Smm	    {
2494209962Smm	      if (GET_CODE (XEXP (note, 0)) == REG)
2495209962Smm		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2496209962Smm
2497209962Smm	      remove_note (undobuf.other_insn, note);
2498209962Smm	    }
2499209962Smm	}
2500209962Smm
2501209962Smm      for (note = new_other_notes; note; note = XEXP (note, 1))
2502209962Smm	if (GET_CODE (XEXP (note, 0)) == REG)
2503209962Smm	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2504209962Smm
2505209962Smm      distribute_notes (new_other_notes, undobuf.other_insn,
2506209962Smm			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2507209962Smm    }
2508209962Smm#ifdef HAVE_cc0
2509209962Smm  /* If I2 is the CC0 setter and I3 is the CC0 user, check whether
2510209962Smm     they are adjacent to each other.  */
2511209962Smm  {
2512209962Smm    rtx p = prev_nonnote_insn (i3);
2513209962Smm    if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
2514209962Smm	&& sets_cc0_p (newi2pat))
2515209962Smm      {
2516209962Smm	undo_all ();
2517209962Smm	return 0;
2518209962Smm      }
2519209962Smm  }
2520209962Smm#endif
2521209962Smm
2522209962Smm  /* We now know that we can do this combination.  Merge the insns and
2523209962Smm     update the status of registers and LOG_LINKS.  */
2524209962Smm
2525209962Smm  {
2526209962Smm    rtx i3notes, i2notes, i1notes = 0;
2527209962Smm    rtx i3links, i2links, i1links = 0;
2528209962Smm    rtx midnotes = 0;
2529209962Smm    unsigned int regno;
2530209962Smm    /* Compute which registers we expect to eliminate.  newi2pat may be setting
2531209962Smm       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
2532209962Smm       same as i3dest, in which case newi2pat may be setting i1dest.  */
2533209962Smm    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2534209962Smm		   || i2dest_in_i2src || i2dest_in_i1src
2535209962Smm		   ? 0 : i2dest);
2536209962Smm    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2537209962Smm		   || (newi2pat && reg_set_p (i1dest, newi2pat))
2538209962Smm		   ? 0 : i1dest);
2539219089Spjd
2540209962Smm    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2541209962Smm       clear them.  */
2542209962Smm    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2543209962Smm    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2544209962Smm    if (i1)
2545209962Smm      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2546209962Smm
2547209962Smm    /* Ensure that we do not have something that should not be shared but
2548209962Smm       occurs multiple times in the new insns.  Check this by first
2549209962Smm       resetting all the `used' flags and then copying anything that is shared.  */
2550209962Smm
2551209962Smm    reset_used_flags (i3notes);
2552209962Smm    reset_used_flags (i2notes);
2553209962Smm    reset_used_flags (i1notes);
2554209962Smm    reset_used_flags (newpat);
2555209962Smm    reset_used_flags (newi2pat);
2556209962Smm    if (undobuf.other_insn)
2557209962Smm      reset_used_flags (PATTERN (undobuf.other_insn));
2558209962Smm
2559209962Smm    i3notes = copy_rtx_if_shared (i3notes);
2560209962Smm    i2notes = copy_rtx_if_shared (i2notes);
2561209962Smm    i1notes = copy_rtx_if_shared (i1notes);
2562209962Smm    newpat = copy_rtx_if_shared (newpat);
2563209962Smm    newi2pat = copy_rtx_if_shared (newi2pat);
2564209962Smm    if (undobuf.other_insn)
2565209962Smm      PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2566209962Smm
2567209962Smm    INSN_CODE (i3) = insn_code_number;
2568209962Smm    PATTERN (i3) = newpat;
2569209962Smm
2570209962Smm    if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
2571209962Smm      {
2572209962Smm	rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
2573209962Smm
2574209962Smm	reset_used_flags (call_usage);
2575209962Smm	call_usage = copy_rtx (call_usage);
2576209962Smm
2577209962Smm	if (substed_i2)
2578209962Smm	  replace_rtx (call_usage, i2dest, i2src);
2579209962Smm
2580209962Smm	if (substed_i1)
2581209962Smm	  replace_rtx (call_usage, i1dest, i1src);
2582209962Smm
2583209962Smm	CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
2584209962Smm      }
2585209962Smm
2586209962Smm    if (undobuf.other_insn)
2587209962Smm      INSN_CODE (undobuf.other_insn) = other_code_number;
2588209962Smm
2589209962Smm    /* We had one special case above where I2 had more than one set and
2590209962Smm       we replaced a destination of one of those sets with the destination
2591209962Smm       of I3.  In that case, we have to update LOG_LINKS of insns later
2592209962Smm       in this basic block.  Note that this (expensive) case is rare.
2593209962Smm
2594209962Smm       Also, in this case, we must pretend that all REG_NOTEs for I2
2595209962Smm       actually came from I3, so that REG_UNUSED notes from I2 will be
2596209962Smm       properly handled.  */
2597209962Smm
2598209962Smm    if (i3_subst_into_i2)
2599209962Smm      {
2600209962Smm	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2601209962Smm	  if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
2602209962Smm	      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2603209962Smm	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2604209962Smm	      && ! find_reg_note (i2, REG_UNUSED,
2605209962Smm				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2606209962Smm	    for (temp = NEXT_INSN (i2);
2607209962Smm		 temp && (this_basic_block == n_basic_blocks - 1
2608209962Smm			  || BLOCK_HEAD (this_basic_block + 1) != temp);
2609228103Smm		 temp = NEXT_INSN (temp))
2610209962Smm	      if (temp != i3 && INSN_P (temp))
2611209962Smm		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2612209962Smm		  if (XEXP (link, 0) == i2)
2613209962Smm		    XEXP (link, 0) = i3;
2614209962Smm
2615209962Smm	if (i3notes)
2616209962Smm	  {
2617209962Smm	    rtx link = i3notes;
2618209962Smm	    while (XEXP (link, 1))
2619209962Smm	      link = XEXP (link, 1);
2620209962Smm	    XEXP (link, 1) = i2notes;
2621209962Smm	  }
2622209962Smm	else
2623209962Smm	  i3notes = i2notes;
2624209962Smm	i2notes = 0;
2625209962Smm      }
2626209962Smm
2627209962Smm    LOG_LINKS (i3) = 0;
2628209962Smm    REG_NOTES (i3) = 0;
2629209962Smm    LOG_LINKS (i2) = 0;
2630209962Smm    REG_NOTES (i2) = 0;
2631209962Smm
2632209962Smm    if (newi2pat)
2633209962Smm      {
2634209962Smm	INSN_CODE (i2) = i2_code_number;
2635209962Smm	PATTERN (i2) = newi2pat;
2636209962Smm      }
2637209962Smm    else
2638209962Smm      {
2639209962Smm	PUT_CODE (i2, NOTE);
2640209962Smm	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2641209962Smm	NOTE_SOURCE_FILE (i2) = 0;
2642209962Smm      }
2643209962Smm
2644209962Smm    if (i1)
2645209962Smm      {
2646209962Smm	LOG_LINKS (i1) = 0;
2647209962Smm	REG_NOTES (i1) = 0;
2648209962Smm	PUT_CODE (i1, NOTE);
2649209962Smm	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2650209962Smm	NOTE_SOURCE_FILE (i1) = 0;
2651209962Smm      }
2652209962Smm
2653209962Smm    /* Get death notes for everything that is now used in either I3 or
2654209962Smm       I2 and used to die in a previous insn.  If we built two new
2655209962Smm       patterns, move from I1 to I2 then I2 to I3 so that we get the
2656209962Smm       proper movement on registers that I2 modifies.  */
2657209962Smm
2658209962Smm    if (newi2pat)
2659209962Smm      {
2660209962Smm	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2661228103Smm	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2662228103Smm      }
2663228103Smm    else
2664168404Spjd      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2665228103Smm		   i3, &midnotes);
2666228103Smm
2667228103Smm    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
2668168404Spjd    if (i3notes)
2669228103Smm      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2670168404Spjd			elim_i2, elim_i1);
2671228103Smm    if (i2notes)
2672228103Smm      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2673228103Smm			elim_i2, elim_i1);
2674228103Smm    if (i1notes)
2675228103Smm      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2676228103Smm			elim_i2, elim_i1);
2677209962Smm    if (midnotes)
2678228103Smm      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2679228103Smm			elim_i2, elim_i1);
2680228103Smm
2681228103Smm    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
2682228103Smm       know these are REG_UNUSED and want them to go to the desired insn,
2683228103Smm       so we always pass it as i3.  We have not counted the notes in
2684228103Smm       reg_n_deaths yet, so we need to do so now.  */
2685228103Smm
2686209962Smm    if (newi2pat && new_i2_notes)
2687228103Smm      {
2688228103Smm	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2689228103Smm	  if (GET_CODE (XEXP (temp, 0)) == REG)
2690228103Smm	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2691228103Smm
2692228103Smm	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2693209962Smm      }
2694209962Smm
2695168404Spjd    if (new_i3_notes)
2696228103Smm      {
2697228103Smm	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2698168404Spjd	  if (GET_CODE (XEXP (temp, 0)) == REG)
2699228103Smm	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2700228103Smm
2701168404Spjd	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2702228103Smm      }
2703185029Spjd
2704228103Smm    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
2705228103Smm       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
2706209962Smm       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
2707228103Smm       in that case, it might delete I2.  Similarly for I2 and I1.
2708228103Smm       Show an additional death due to the REG_DEAD note we make here.  If
2709228103Smm       we discard it in distribute_notes, we will decrement it again.  */
2710228103Smm
2711168404Spjd    if (i3dest_killed)
2712228103Smm      {
2713168404Spjd	if (GET_CODE (i3dest_killed) == REG)
2714168404Spjd	  REG_N_DEATHS (REGNO (i3dest_killed))++;
2715168404Spjd
2716228103Smm	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2717168404Spjd	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2718228103Smm					       NULL_RTX),
2719228103Smm			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2720168404Spjd	else
2721228103Smm	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2722228103Smm					       NULL_RTX),
2723168404Spjd			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2724228103Smm			    elim_i2, elim_i1);
2725228103Smm      }
2726228103Smm
2727228103Smm    if (i2dest_in_i2src)
2728228103Smm      {
2729228103Smm	if (GET_CODE (i2dest) == REG)
2730228103Smm	  REG_N_DEATHS (REGNO (i2dest))++;
2731168404Spjd
2732168404Spjd	if (newi2pat && reg_set_p (i2dest, newi2pat))
2733168404Spjd	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2734219089Spjd			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2735219089Spjd	else
2736219089Spjd	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2737219089Spjd			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2738219089Spjd			    NULL_RTX, NULL_RTX);
2739219089Spjd      }
2740219089Spjd
2741219089Spjd    if (i1dest_in_i1src)
2742219089Spjd      {
2743219089Spjd	if (GET_CODE (i1dest) == REG)
2744219089Spjd	  REG_N_DEATHS (REGNO (i1dest))++;
2745219089Spjd
2746219089Spjd	if (newi2pat && reg_set_p (i1dest, newi2pat))
2747219089Spjd	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2748219089Spjd			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2749219089Spjd	else
2750219089Spjd	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2751219089Spjd			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2752219089Spjd			    NULL_RTX, NULL_RTX);
2753219089Spjd      }
2754219089Spjd
2755168404Spjd    distribute_links (i3links);
2756228103Smm    distribute_links (i2links);
2757228103Smm    distribute_links (i1links);
2758168404Spjd
2759168404Spjd    if (GET_CODE (i2dest) == REG)
2760168404Spjd      {
2761168404Spjd	rtx link;
2762228103Smm	rtx i2_insn = 0, i2_val = 0, set;
2763168404Spjd
2764228103Smm	/* The insn that used to set this register doesn't exist, and
2765228103Smm	   this life of the register may not exist either.  See if one of
2766228103Smm	   I3's links points to an insn that sets I2DEST.  If it does,
2767168404Spjd	   that is now the last known value for I2DEST. If we don't update
2768228103Smm	   this and I2 set the register to a value that depended on its old
2769168404Spjd	   contents, we will get confused.  If this insn is used, things
2770168404Spjd	   will be set correctly in combine_instructions.  */
2771168404Spjd
2772168404Spjd	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2773168404Spjd	  if ((set = single_set (XEXP (link, 0))) != 0
2774185029Spjd	      && rtx_equal_p (i2dest, SET_DEST (set)))
2775185029Spjd	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2776185029Spjd
2777185029Spjd	record_value_for_reg (i2dest, i2_insn, i2_val);
2778185029Spjd
2779185029Spjd	/* If the reg formerly set in I2 died only once and that was in I3,
2780168404Spjd	   zero its use count so it won't make `reload' do any work.  */
2781168404Spjd	if (! added_sets_2
2782185029Spjd	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2783185029Spjd	    && ! i2dest_in_i2src)
2784168404Spjd	  {
2785168404Spjd	    regno = REGNO (i2dest);
2786168404Spjd	    REG_N_SETS (regno)--;
2787168404Spjd	  }
2788168404Spjd      }
2789168404Spjd
2790219089Spjd    if (i1 && GET_CODE (i1dest) == REG)
2791168404Spjd      {
2792209962Smm	rtx link;
2793209962Smm	rtx i1_insn = 0, i1_val = 0, set;
2794168404Spjd
2795168404Spjd	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2796168404Spjd	  if ((set = single_set (XEXP (link, 0))) != 0
2797168404Spjd	      && rtx_equal_p (i1dest, SET_DEST (set)))
2798168404Spjd	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2799168404Spjd
2800168404Spjd	record_value_for_reg (i1dest, i1_insn, i1_val);
2801168404Spjd
2802168404Spjd	regno = REGNO (i1dest);
2803168404Spjd	if (! added_sets_1 && ! i1dest_in_i1src)
2804168404Spjd	  REG_N_SETS (regno)--;
2805168404Spjd      }
2806168404Spjd
2807168404Spjd    /* Update reg_nonzero_bits et al for any changes that may have been made
2808168404Spjd       to this insn.  The order of the set_nonzero_bits_and_sign_copies
2809168404Spjd       calls is important, because newi2pat can affect the nonzero_bits of newpat.  */
2810168404Spjd    if (newi2pat)
2811168404Spjd      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
2812168404Spjd    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
2813168404Spjd
2814168404Spjd    /* Set new_direct_jump_p if a new return or simple jump instruction
2815185029Spjd       has been created.
2816185029Spjd
2817185029Spjd       If I3 is now an unconditional jump, ensure that it has a
2818185029Spjd       BARRIER following it since it may have initially been a
2819185029Spjd       conditional jump.  It may also be the last nonnote insn.  */
2820185029Spjd
2821185029Spjd    if (GET_CODE (newpat) == RETURN || any_uncondjump_p (i3))
2822185029Spjd      {
2823185029Spjd	*new_direct_jump_p = 1;
2824185029Spjd
2825185029Spjd	if ((temp = next_nonnote_insn (i3)) == NULL_RTX
2826168404Spjd	    || GET_CODE (temp) != BARRIER)
2827168404Spjd	  emit_barrier_after (i3);
2828168404Spjd      }
2829185029Spjd    /* A no-op jump does not need a barrier, but it does need its
2830168404Spjd       CFG cleaned up.  */
2831168404Spjd    if (GET_CODE (newpat) == SET
2832168404Spjd	&& SET_SRC (newpat) == pc_rtx
2833219089Spjd	&& SET_DEST (newpat) == pc_rtx)
2834219089Spjd      *new_direct_jump_p = 1;
2835219089Spjd  }
2836219089Spjd
2837168404Spjd  combine_successes++;
2838219089Spjd  undo_commit ();
2839168404Spjd
2840168404Spjd  /* Clear this here, so that subsequent get_last_value calls are not
2841168404Spjd     affected.  */
2842168404Spjd  subst_prev_insn = NULL_RTX;
2843168404Spjd
2844168404Spjd  if (added_links_insn
2845168404Spjd      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2846168404Spjd      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2847168404Spjd    return added_links_insn;
2848168404Spjd  else
2849168404Spjd    return newi2pat ? i2 : i3;
2850168404Spjd}
2851168404Spjd
2852168404Spjd/* Undo all the modifications recorded in undobuf.  */
2853168404Spjd
2854185029Spjdstatic void
2855185029Spjdundo_all ()
2856168404Spjd{
2857168404Spjd  struct undo *undo, *next;
2858168404Spjd
2859168404Spjd  for (undo = undobuf.undos; undo; undo = next)
2860185029Spjd    {
2861185029Spjd      next = undo->next;
2862185029Spjd      if (undo->is_int)
2863185029Spjd	*undo->where.i = undo->old_contents.i;
2864185029Spjd      else
2865185029Spjd	*undo->where.r = undo->old_contents.r;
2866185029Spjd
2867185029Spjd      undo->next = undobuf.frees;
2868185029Spjd      undobuf.frees = undo;
2869185029Spjd    }
2870185029Spjd
2871185029Spjd  undobuf.undos = 0;
2872185029Spjd
2873185029Spjd  /* Clear this here, so that subsequent get_last_value calls are not
2874185029Spjd     affected.  */
2875185029Spjd  subst_prev_insn = NULL_RTX;
2876185029Spjd}
2877185029Spjd
2878185029Spjd/* We've committed to accepting the changes we made.  Move all
2879185029Spjd   of the undos to the free list.  */
2880185029Spjd
2881185029Spjdstatic void
2882185029Spjdundo_commit ()
2883185029Spjd{
2884185029Spjd  struct undo *undo, *next;
2885185029Spjd
2886185029Spjd  for (undo = undobuf.undos; undo; undo = next)
2887185029Spjd    {
2888185029Spjd      next = undo->next;
2889185029Spjd      undo->next = undobuf.frees;
2890185029Spjd      undobuf.frees = undo;
2891185029Spjd    }
2892185029Spjd  undobuf.undos = 0;
2893185029Spjd}
2894185029Spjd
2895185029Spjd
2896185029Spjd/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2897185029Spjd   where we have an arithmetic expression and return that point.  LOC will
2898185029Spjd   be inside INSN.
2899185029Spjd
2900185029Spjd   try_combine will call this function to see if an insn can be split into
2901185029Spjd   two insns.  */
2902185029Spjd
2903185029Spjdstatic rtx *
2904185029Spjdfind_split_point (loc, insn)
2905185029Spjd     rtx *loc;
2906185029Spjd     rtx insn;
2907185029Spjd{
2908185029Spjd  rtx x = *loc;
2909185029Spjd  enum rtx_code code = GET_CODE (x);
2910185029Spjd  rtx *split;
2911185029Spjd  unsigned HOST_WIDE_INT len = 0;
2912185029Spjd  HOST_WIDE_INT pos = 0;
2913185029Spjd  int unsignedp = 0;
2914185029Spjd  rtx inner = NULL_RTX;
2915185029Spjd
2916185029Spjd  /* First special-case some codes.  */
2917185029Spjd  switch (code)
2918185029Spjd    {
2919185029Spjd    case SUBREG:
2920185029Spjd#ifdef INSN_SCHEDULING
2921185029Spjd      /* If we are making a paradoxical SUBREG invalid, it becomes a split
2922185029Spjd	 point.  */
2923185029Spjd      if (GET_CODE (SUBREG_REG (x)) == MEM)
2924185029Spjd	return loc;
2925185029Spjd#endif
2926185029Spjd      return find_split_point (&SUBREG_REG (x), insn);
2927185029Spjd
2928185029Spjd    case MEM:
2929185029Spjd#ifdef HAVE_lo_sum
2930185029Spjd      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2931185029Spjd	 using LO_SUM and HIGH.  */
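      /* E.g. (mem (symbol_ref "x")) becomes
	 (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))),
	 and the location of the HIGH part is returned as the split point.  */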
2932185029Spjd      if (GET_CODE (XEXP (x, 0)) == CONST
2933185029Spjd	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2934185029Spjd	{
2935185029Spjd	  SUBST (XEXP (x, 0),
2936185029Spjd		 gen_rtx_LO_SUM (Pmode,
2937185029Spjd				 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
2938185029Spjd				 XEXP (x, 0)));
2939185029Spjd	  return &XEXP (XEXP (x, 0), 0);
2940185029Spjd	}
2941185029Spjd#endif
2942185029Spjd
2943185029Spjd      /* If we have a PLUS whose second operand is a constant and the
2944185029Spjd	 address is not valid, perhaps we can split it up using
2945185029Spjd	 the machine-specific way to split large constants.  We use
2946185029Spjd	 the first pseudo-reg (one of the virtual regs) as a placeholder;
2947185029Spjd	 it will not remain in the result.  */
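      /* A hedged sketch, assuming a RISC-like target: if
	 (plus (reg:SI 1) (const_int 100000)) is not a valid address,
	 split_insns may expand the constant load separately, e.g.

	     (set (reg PH) (const_int 100000))
	     (set (reg PH) (plus (reg:SI 1) (reg PH)))

	 where PH stands for the placeholder pseudo mentioned above; the
	 two sources are then recombined with a split point in the middle.  */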
2948185029Spjd      if (GET_CODE (XEXP (x, 0)) == PLUS
2949185029Spjd	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2950185029Spjd	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2951185029Spjd	{
2952185029Spjd	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2953185029Spjd	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2954185029Spjd				 subst_insn);
2955185029Spjd
2956185029Spjd	  /* This should have produced two insns, each of which sets our
2957185029Spjd	     placeholder.  If the source of the second is a valid address,
2958185029Spjd	     we can put both sources together and make a split point
2959185029Spjd	     in the middle.  */
2960185029Spjd
2961185029Spjd	  if (seq && XVECLEN (seq, 0) == 2
2962185029Spjd	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2963185029Spjd	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2964185029Spjd	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2965185029Spjd	      && ! reg_mentioned_p (reg,
2966185029Spjd				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2967185029Spjd	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2968219089Spjd	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2969185029Spjd	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2970185029Spjd	      && memory_address_p (GET_MODE (x),
2971185029Spjd				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2972185029Spjd	    {
2973185029Spjd	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2974185029Spjd	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2975185029Spjd
2976185029Spjd	      /* Replace the placeholder in SRC2 with SRC1.  If we can
2977185029Spjd		 find where in SRC2 it was placed, that can become our
2978185029Spjd		 split point and we can replace this address with SRC2.
2979185029Spjd		 Just try two obvious places.  */
2980185029Spjd
2981185029Spjd	      src2 = replace_rtx (src2, reg, src1);
2982168404Spjd	      split = 0;
2983168404Spjd	      if (XEXP (src2, 0) == src1)
2984168404Spjd		split = &XEXP (src2, 0);
2985168404Spjd	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2986168404Spjd		       && XEXP (XEXP (src2, 0), 0) == src1)
2987168404Spjd		split = &XEXP (XEXP (src2, 0), 0);
2988168404Spjd
2989168404Spjd	      if (split)
2990168404Spjd		{
2991168404Spjd		  SUBST (XEXP (x, 0), src2);
2992168404Spjd		  return split;
2993248571Smm		}
2994168404Spjd	    }
2995168404Spjd
2996168404Spjd	  /* If that didn't work, perhaps the first operand is complex and
2997168404Spjd	     needs to be computed separately, so make a split point there.
2998168404Spjd	     This will occur on machines that just support REG + CONST
2999185029Spjd	     and have a constant moved through some previous computation.  */
3000168404Spjd
3001168404Spjd	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
3002168404Spjd		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
3003185029Spjd			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
3004168404Spjd			     == 'o')))
3005168404Spjd	    return &XEXP (XEXP (x, 0), 0);
3006168404Spjd	}
3007168404Spjd      break;
3008168404Spjd
3009168404Spjd    case SET:
3010168404Spjd#ifdef HAVE_cc0
3011168404Spjd      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
3012168404Spjd	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
3013248571Smm	 we need to put the operand into a register.  So split at that
3014168404Spjd	 point.  */
3015168404Spjd
3016168404Spjd      if (SET_DEST (x) == cc0_rtx
3017168404Spjd	  && GET_CODE (SET_SRC (x)) != COMPARE
3018168404Spjd	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
3019168404Spjd	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
3020248571Smm	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
3021168404Spjd		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
3022248571Smm	return &SET_SRC (x);
3023168404Spjd#endif
3024185029Spjd
3025168404Spjd      /* See if we can split SET_SRC as it stands.  */
3026168404Spjd      split = find_split_point (&SET_SRC (x), insn);
3027168404Spjd      if (split && split != &SET_SRC (x))
3028168404Spjd	return split;
3029168404Spjd
3030168404Spjd      /* See if we can split SET_DEST as it stands.  */
3031168404Spjd      split = find_split_point (&SET_DEST (x), insn);
3032168404Spjd      if (split && split != &SET_DEST (x))
3033168404Spjd	return split;
3034168404Spjd
3035168404Spjd      /* See if this is a bitfield assignment with everything constant.  If
3036168404Spjd	 so, this is an IOR of an AND, so split it into that.  */
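      /* The rewrite below uses the usual read-modify-write identity
	 (a sketch, independent of the target):

	     dest = (dest & ~(MASK << POS)) | (SRC << POS)

	 with MASK == (1 << LEN) - 1; when SRC == MASK the AND is
	 redundant and a plain IOR suffices, the first case below.  */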
3037168404Spjd      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3038168404Spjd	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
3039168404Spjd	      <= HOST_BITS_PER_WIDE_INT)
3040168404Spjd	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
3041168404Spjd	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
3042168404Spjd	  && GET_CODE (SET_SRC (x)) == CONST_INT
3043168404Spjd	  && ((INTVAL (XEXP (SET_DEST (x), 1))
3044168404Spjd	       + INTVAL (XEXP (SET_DEST (x), 2)))
3045168404Spjd	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
3046168404Spjd	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
3047168404Spjd	{
3048168404Spjd	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
3049168404Spjd	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
3050168404Spjd	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
3051168404Spjd	  rtx dest = XEXP (SET_DEST (x), 0);
3052168404Spjd	  enum machine_mode mode = GET_MODE (dest);
3053168404Spjd	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
3054168404Spjd
3055168404Spjd	  if (BITS_BIG_ENDIAN)
3056168404Spjd	    pos = GET_MODE_BITSIZE (mode) - len - pos;
3057168404Spjd
3058168404Spjd	  if (src == mask)
3059168404Spjd	    SUBST (SET_SRC (x),
3060168404Spjd		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
3061168404Spjd	  else
3062168404Spjd	    SUBST (SET_SRC (x),
3063168404Spjd		   gen_binary (IOR, mode,
3064168404Spjd			       gen_binary (AND, mode, dest,
3065168404Spjd					   GEN_INT
3066168404Spjd					   (
3067168404Spjd					    trunc_int_for_mode
3068168404Spjd					    (~(mask << pos)
3069168404Spjd					     & GET_MODE_MASK (mode), mode))),
3070168404Spjd			       GEN_INT (src << pos)));
3071168404Spjd
3072168404Spjd	  SUBST (SET_DEST (x), dest);
3073168404Spjd
3074248571Smm	  split = find_split_point (&SET_SRC (x), insn);
3075248571Smm	  if (split && split != &SET_SRC (x))
3076168404Spjd	    return split;
3077168404Spjd	}
3078168404Spjd
3079168404Spjd      /* Otherwise, see if this is an operation that we can split into two.
3080168404Spjd	 If so, try to split that.  */
3081168404Spjd      code = GET_CODE (SET_SRC (x));
3082168404Spjd
3083168404Spjd      switch (code)
3084168404Spjd	{
3085168404Spjd	case AND:
3086168404Spjd	  /* If we are AND'ing with a large constant that is only a single
3087168404Spjd	     bit and the result is only being used in a context where we
3088168404Spjd	     need to know if it is zero or non-zero, replace it with a bit
3089168404Spjd	     extraction.  This will avoid the large constant, which might
3090168404Spjd	     have taken more than one insn to make.  If the constant were
3091168404Spjd	     not a valid argument to the AND but took only one insn to make,
3092168404Spjd	     this is no worse, but if it took more than one insn, it will
3093168404Spjd	     be better.  */
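	  /* For example (hypothetical): if (reg Y) is set to
	     (and (reg X) (const_int 0x1000)) and used only in a comparison
	     against zero, the AND can be replaced by
	     (zero_extract (reg X) (const_int 1) (const_int 12)), so the
	     large constant 0x1000 never has to be loaded.  */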
3094168404Spjd
3095168404Spjd	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3096168404Spjd	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
3097168404Spjd	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
3098168404Spjd	      && GET_CODE (SET_DEST (x)) == REG
3099168404Spjd	      && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
3100168404Spjd	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
3101168404Spjd	      && XEXP (*split, 0) == SET_DEST (x)
3102168404Spjd	      && XEXP (*split, 1) == const0_rtx)
3103185029Spjd	    {
3104185029Spjd	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
3105185029Spjd						XEXP (SET_SRC (x), 0),
3106185029Spjd						pos, NULL_RTX, 1, 1, 0, 0);
3107185029Spjd	      if (extraction != 0)
3108168404Spjd		{
3109168404Spjd		  SUBST (SET_SRC (x), extraction);
3110168404Spjd		  return find_split_point (loc, insn);
3111168404Spjd		}
3112168404Spjd	    }
3113168404Spjd	  break;
3114168404Spjd
3115168404Spjd	case NE:
3116168404Spjd	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
3117168404Spjd	     is known to be on, this can be converted into a NEG of a shift.  */
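	  /* For instance (a sketch): if only bit 3 of X can be nonzero,
	     then X >> 3 is 0 or 1, and with STORE_FLAG_VALUE == -1
	     (ne X 0) is exactly (neg (lshiftrt X 3)), i.e. 0 or -1.  */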
3118168404Spjd	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
3119168404Spjd	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
3120168404Spjd	      && 1 <= (pos = exact_log2
3121168404Spjd		       (nonzero_bits (XEXP (SET_SRC (x), 0),
3122168404Spjd				      GET_MODE (XEXP (SET_SRC (x), 0))))))
3123168404Spjd	    {
3124168404Spjd	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
3125168404Spjd
3126168404Spjd	      SUBST (SET_SRC (x),
3127168404Spjd		     gen_rtx_NEG (mode,
3128238422Smm				  gen_rtx_LSHIFTRT (mode,
3129238422Smm						    XEXP (SET_SRC (x), 0),
3130168404Spjd						    GEN_INT (pos))));
3131168404Spjd
3132219089Spjd	      split = find_split_point (&SET_SRC (x), insn);
3133168404Spjd	      if (split && split != &SET_SRC (x))
3134168404Spjd		return split;
3135168404Spjd	    }
3136168404Spjd	  break;
3137168404Spjd
3138168404Spjd	case SIGN_EXTEND:
3139168404Spjd	  inner = XEXP (SET_SRC (x), 0);
3140168404Spjd
3141168404Spjd	  /* We can't optimize if either mode is a partial integer
3142168404Spjd	     mode as we don't know how many bits are significant
3143168404Spjd	     in those modes.  */
3144219089Spjd	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
3145238422Smm	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
3146238422Smm	    break;
3147168404Spjd
3148168404Spjd	  pos = 0;
3149168404Spjd	  len = GET_MODE_BITSIZE (GET_MODE (inner));
3150168404Spjd	  unsignedp = 0;
3151168404Spjd	  break;
3152168404Spjd
3153168404Spjd	case SIGN_EXTRACT:
3154168404Spjd	case ZERO_EXTRACT:
3155168404Spjd	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3156168404Spjd	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3157168404Spjd	    {
3158228103Smm	      inner = XEXP (SET_SRC (x), 0);
3159228103Smm	      len = INTVAL (XEXP (SET_SRC (x), 1));
3160168404Spjd	      pos = INTVAL (XEXP (SET_SRC (x), 2));
3161168404Spjd
3162168404Spjd	      if (BITS_BIG_ENDIAN)
3163219089Spjd		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
3164168404Spjd	      unsignedp = (code == ZERO_EXTRACT);
3165168404Spjd	    }
3166168404Spjd	  break;
3167219089Spjd
3168168404Spjd	default:
3169228103Smm	  break;
3170228103Smm	}
3171168404Spjd
3172251646Sdelphij      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3173228103Smm	{
3174168404Spjd	  enum machine_mode mode = GET_MODE (SET_SRC (x));
3175228103Smm
3176228103Smm	  /* For unsigned, we have a choice of a shift followed by an
3177168404Spjd	     AND or two shifts.  Use two shifts for field sizes where the
3178168404Spjd	     constant might be too large.  We assume here that we can
3179168404Spjd	     always at least get 8-bit constants in an AND insn, which is
3180168404Spjd	     true for every current RISC.  */
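	  /* A sketch of the two forms for LEN bits at position POS in an
	     N-bit mode: the AND form computes (X >> POS) & ((1 << LEN) - 1),
	     while the two-shift form computes
	     (X << (N - LEN - POS)) >> (N - LEN), using a logical right
	     shift for unsigned fields and an arithmetic one for signed.  */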
3181168404Spjd
3182168404Spjd	  if (unsignedp && len <= 8)
3183168404Spjd	    {
3184219089Spjd	      SUBST (SET_SRC (x),
3185168404Spjd		     gen_rtx_AND (mode,
3186168404Spjd				  gen_rtx_LSHIFTRT
3187168404Spjd				  (mode, gen_lowpart_for_combine (mode, inner),
3188168404Spjd				   GEN_INT (pos)),
3189168404Spjd				  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
3190228103Smm
3191228103Smm	      split = find_split_point (&SET_SRC (x), insn);
3192168404Spjd	      if (split && split != &SET_SRC (x))
3193251646Sdelphij		return split;
3194228103Smm	    }
3195168404Spjd	  else
3196228103Smm	    {
3197228103Smm	      SUBST (SET_SRC (x),
3198248571Smm		     gen_rtx_fmt_ee
3199168404Spjd		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
3200228103Smm		      gen_rtx_ASHIFT (mode,
3201228103Smm				      gen_lowpart_for_combine (mode, inner),
3202228103Smm				      GEN_INT (GET_MODE_BITSIZE (mode)
3203168404Spjd					       - len - pos)),
3204228103Smm		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
3205248571Smm
3206228103Smm	      split = find_split_point (&SET_SRC (x), insn);
3207228103Smm	      if (split && split != &SET_SRC (x))
3208248571Smm		return split;
3209228103Smm	    }
3210228103Smm	}
3211248571Smm
3212228103Smm      /* See if this is a simple operation with a constant as the second
3213248571Smm	 operand.  It might be that this constant is out of range and hence
3214168404Spjd	 could be used as a split point.  */
3215248571Smm      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3216248571Smm	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3217248571Smm	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3218251646Sdelphij	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
3219168404Spjd	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3220248571Smm	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3221248571Smm		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3222168404Spjd		      == 'o'))))
3223248571Smm	return &XEXP (SET_SRC (x), 1);
3224248571Smm
3225248571Smm      /* Finally, see if this is a simple operation with its first operand
3226248571Smm	 not in a register.  The operation might require this operand in a
3227248571Smm	 register, so return it as a split point.  We can always do this
3228248571Smm	 because if the first operand were another operation, we would have
3229248571Smm	 already found it as a split point.  */
3230248571Smm      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3231168404Spjd	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3232248571Smm	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3233168404Spjd	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3234248571Smm	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3235248571Smm	return &XEXP (SET_SRC (x), 0);
3236248571Smm
3237248571Smm      return 0;
3238168404Spjd
3239248571Smm    case AND:
3240248571Smm    case IOR:
3241168404Spjd      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3242168404Spjd	 it is better to write this as (not (ior A B)) so we can split it.
3243168404Spjd	 Similarly for IOR.  */
3244248571Smm      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3245168404Spjd	{
3246168404Spjd	  SUBST (*loc,
3247168404Spjd		 gen_rtx_NOT (GET_MODE (x),
3248168404Spjd			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
3249168404Spjd					      GET_MODE (x),
3250168404Spjd					      XEXP (XEXP (x, 0), 0),
3251168404Spjd					      XEXP (XEXP (x, 1), 0))));
3252168404Spjd	  return find_split_point (loc, insn);
3253168404Spjd	}
3254168404Spjd
3255168404Spjd      /* Many RISC machines have a large set of logical insns.  If the
3256168404Spjd	 second operand is a NOT, put it first so we will try to split the
3257168404Spjd	 other operand first.  */
3258168404Spjd      if (GET_CODE (XEXP (x, 1)) == NOT)
3259168404Spjd	{
3260168404Spjd	  rtx tem = XEXP (x, 0);
3261168404Spjd	  SUBST (XEXP (x, 0), XEXP (x, 1));
3262168404Spjd	  SUBST (XEXP (x, 1), tem);
3263168404Spjd	}
3264228103Smm      break;
3265185029Spjd
3266168404Spjd    default:
3267168404Spjd      break;
3268168404Spjd    }
3269185029Spjd
3270168404Spjd  /* Otherwise, select our actions depending on our rtx class.  */
3271168404Spjd  switch (GET_RTX_CLASS (code))
3272168404Spjd    {
3273168404Spjd    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
3274168404Spjd    case '3':
3275168404Spjd      split = find_split_point (&XEXP (x, 2), insn);
3276168404Spjd      if (split)
3277248571Smm	return split;
3278248571Smm      /* ... fall through ...  */
3279248571Smm    case '2':
3280248571Smm    case 'c':
3281248571Smm    case '<':
3282248571Smm      split = find_split_point (&XEXP (x, 1), insn);
3283185029Spjd      if (split)
3284185029Spjd	return split;
3285168404Spjd      /* ... fall through ...  */
3286168404Spjd    case '1':
3287168404Spjd      /* Some machines have (and (shift ...) ...) insns.  If X is not
3288248571Smm	 an AND, but XEXP (X, 0) is, use it as our split point.  */
3289248571Smm      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3290168404Spjd	return &XEXP (x, 0);
3291168404Spjd
3292168404Spjd      split = find_split_point (&XEXP (x, 0), insn);
3293168404Spjd      if (split)
3294168404Spjd	return split;
3295168404Spjd      return loc;
3296168404Spjd    }
3297168404Spjd
3298168404Spjd  /* Otherwise, we don't have a split point.  */
3299168404Spjd  return 0;
3300168404Spjd}
3301168404Spjd
3302168404Spjd/* Throughout X, replace FROM with TO, and return the result.
3303168404Spjd   The result is TO if X is FROM;
3304168404Spjd   otherwise the result is X, but its contents may have been modified.
3305168404Spjd   If they were modified, a record was made in undobuf so that
3306168404Spjd   undo_all will (among other things) return X to its original state.
3307168404Spjd
3308168404Spjd   If the number of changes necessary is too large to record for undoing,
3309168404Spjd   the excess changes are not made, so the result is invalid.
3310168404Spjd   The changes already made can still be undone.
3311168404Spjd   undobuf.num_undo is incremented for such changes, so by testing it
3312168404Spjd   the caller can tell whether the result is valid.
3313168404Spjd
3314168404Spjd   `n_occurrences' is incremented each time FROM is replaced.
3315168404Spjd
3316168404Spjd   IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3317168404Spjd
3318168404Spjd   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
3319168404Spjd   by copying if `n_occurrences' is non-zero.  */
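/* For example (a hypothetical call, names illustrative only): if I2 was
   (set (reg 65) (plus (reg 66) (const_int 4))), try_combine can invoke
   roughly subst (PATTERN (i3), i2dest, i2src, 0, 0) to replace each use
   of register 65 inside I3's pattern with the PLUS expression.  */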
3320168404Spjd
3321168404Spjdstatic rtx
3322168404Spjdsubst (x, from, to, in_dest, unique_copy)
3323168404Spjd     rtx x, from, to;
3324168404Spjd     int in_dest;
3325168404Spjd     int unique_copy;
3326168404Spjd{
3327168404Spjd  enum rtx_code code = GET_CODE (x);
3328168404Spjd  enum machine_mode op0_mode = VOIDmode;
3329168404Spjd  const char *fmt;
3330168404Spjd  int len, i;
3331168404Spjd  rtx new;
3332168404Spjd
3333168404Spjd/* Two expressions are equal if they are identical copies of a shared
3334168404Spjd   RTX or if they are both registers with the same register number
3335168404Spjd   and mode.  */
3336168404Spjd
3337168404Spjd#define COMBINE_RTX_EQUAL_P(X,Y)			\
3338168404Spjd  ((X) == (Y)						\
3339168404Spjd   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
3340168404Spjd       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3341168404Spjd
3342168404Spjd  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3343168404Spjd    {
3344185029Spjd      n_occurrences++;
3345168404Spjd      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3346168404Spjd    }
3347168404Spjd
3348168404Spjd  /* If X and FROM are the same register but different modes, they will
3349168404Spjd     not have been seen as equal above.  However, flow.c will make a
3350168404Spjd     LOG_LINKS entry for that case.  If we do nothing, we will try to
3351185029Spjd     rerecognize our original insn and, when it succeeds, we will
3352168404Spjd     delete the feeding insn, which is incorrect.
3353168404Spjd
3354185029Spjd     So force this insn not to match in this (rare) case.  */
3355168404Spjd  if (! in_dest && code == REG && GET_CODE (from) == REG
3356168404Spjd      && REGNO (x) == REGNO (from))
3357168404Spjd    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3358168404Spjd
3359168404Spjd  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3360168404Spjd     of which may contain things that can be combined.  */
3361219089Spjd  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3362168404Spjd    return x;
3363219089Spjd
3364219089Spjd  /* It is possible to have a subexpression appear twice in the insn.
3365168404Spjd     Suppose that FROM is a register that appears within TO.
3366168404Spjd     Then, after that subexpression has been scanned once by `subst',
3367168404Spjd     the second time it is scanned, TO may be found.  If we were
3368168404Spjd     to scan TO here, we would find FROM within it and create a
3369168404Spjd     self-referent rtl structure which is completely wrong.  */
3370168404Spjd  if (COMBINE_RTX_EQUAL_P (x, to))
3371168404Spjd    return to;
3372168404Spjd
3373168404Spjd  /* Parallel asm_operands need special attention because all of the
3374248571Smm     inputs are shared across the arms.  Furthermore, unsharing the
3375248571Smm     rtl results in recognition failures.  Failure to handle this case
3376248571Smm     specially can result in circular rtl.
3377248571Smm
3378248571Smm     Solve this by doing a normal pass across the first entry of the
3379248571Smm     parallel, and only processing the SET_DESTs of the subsequent
3380248571Smm     entries.  Ug.  */
3381248571Smm
3382248571Smm  if (code == PARALLEL
3383248571Smm      && GET_CODE (XVECEXP (x, 0, 0)) == SET
3384248571Smm      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
3385248571Smm    {
3386253819Sdelphij      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3387253819Sdelphij
3388253819Sdelphij      /* If this substitution failed, this whole thing fails.  */
3389248571Smm      if (GET_CODE (new) == CLOBBER
3390253819Sdelphij	  && XEXP (new, 0) == const0_rtx)
3391248571Smm	return new;
3392253819Sdelphij
3393253819Sdelphij      SUBST (XVECEXP (x, 0, 0), new);
3394248571Smm
3395253819Sdelphij      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
3396248571Smm	{
3397248571Smm	  rtx dest = SET_DEST (XVECEXP (x, 0, i));
3398248571Smm
3399168404Spjd	  if (GET_CODE (dest) != REG
3400248571Smm	      && GET_CODE (dest) != CC0
3401248571Smm	      && GET_CODE (dest) != PC)
3402168404Spjd	    {
3403168404Spjd	      new = subst (dest, from, to, 0, unique_copy);
3404248571Smm
3405168404Spjd	      /* If this substitution failed, this whole thing fails.  */
3406168404Spjd	      if (GET_CODE (new) == CLOBBER
3407168404Spjd		  && XEXP (new, 0) == const0_rtx)
3408248571Smm		return new;
3409248571Smm
3410168404Spjd	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
3411168404Spjd	    }
3412248571Smm	}
3413168404Spjd    }
3414248571Smm  else
3415248571Smm    {
3416248571Smm      len = GET_RTX_LENGTH (code);
3417168404Spjd      fmt = GET_RTX_FORMAT (code);
3418248571Smm
3419248571Smm      /* We don't need to process a SET_DEST that is a register, CC0,
3420248571Smm	 or PC, so set up to skip this common case.  All other cases
3421248571Smm	 where we want to suppress replacing something inside a
3422248571Smm	 SET_SRC are handled via the IN_DEST operand.  */
3423248571Smm      if (code == SET
3424248571Smm	  && (GET_CODE (SET_DEST (x)) == REG
3425248571Smm	      || GET_CODE (SET_DEST (x)) == CC0
3426248571Smm	      || GET_CODE (SET_DEST (x)) == PC))
3427185029Spjd	fmt = "ie";
3428248571Smm
3429248571Smm      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3430248571Smm	 constant.  */
3431248571Smm      if (fmt[0] == 'e')
3432248571Smm	op0_mode = GET_MODE (XEXP (x, 0));
3433248571Smm
3434248571Smm      for (i = 0; i < len; i++)
3435248571Smm	{
3436248571Smm	  if (fmt[i] == 'E')
3437248571Smm	    {
3438248571Smm	      int j;
3439248571Smm	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3440248571Smm		{
3441248571Smm		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3442248571Smm		    {
3443248571Smm		      new = (unique_copy && n_occurrences
3444248571Smm			     ? copy_rtx (to) : to);
3445248571Smm		      n_occurrences++;
3446248571Smm		    }
3447185029Spjd		  else
3448248571Smm		    {
3449248571Smm		      new = subst (XVECEXP (x, i, j), from, to, 0,
3450248571Smm				   unique_copy);
3451248571Smm
3452248571Smm		      /* If this substitution failed, this whole thing
3453248571Smm			 fails.  */
3454248571Smm		      if (GET_CODE (new) == CLOBBER
3455185029Spjd			  && XEXP (new, 0) == const0_rtx)
3456248571Smm			return new;
3457248571Smm		    }
3458248571Smm
3459248571Smm		  SUBST (XVECEXP (x, i, j), new);
3460248571Smm		}
3461185029Spjd	    }
3462185029Spjd	  else if (fmt[i] == 'e')
3463248571Smm	    {
3464248571Smm	      /* If this is a register being set, ignore it.  */
3465248571Smm	      new = XEXP (x, i);
3466248571Smm	      if (in_dest
3467168404Spjd		  && (code == SUBREG || code == STRICT_LOW_PART
3468248571Smm		      || code == ZERO_EXTRACT)
3469248571Smm		  && i == 0
3470248571Smm		  && GET_CODE (new) == REG)
3471248571Smm		;
3472248571Smm
3473248571Smm	      else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3474248571Smm		{
3475248571Smm		  /* In general, don't install a subreg involving two
3476248571Smm		     modes that are not tieable.  It can worsen register
3477248571Smm		     allocation, and can even make invalid reload
3478248571Smm		     insns, since the reg inside may need to be copied
3479248571Smm		     from in the outside mode, and that may be invalid
3480248571Smm		     if it is an fp reg copied in integer mode.
3481248571Smm
3482248571Smm		     We allow two exceptions to this: It is valid if
3483248571Smm		     it is inside another SUBREG and the mode of that
3484248571Smm		     SUBREG and the mode of the inside of TO is
3485248571Smm		     tieable and it is valid if X is a SET that copies
3486248571Smm		     FROM to CC0.  */
3487248571Smm
3488248571Smm		  if (GET_CODE (to) == SUBREG
3489248571Smm		      && ! MODES_TIEABLE_P (GET_MODE (to),
3490248571Smm					    GET_MODE (SUBREG_REG (to)))
3491168404Spjd		      && ! (code == SUBREG
3492168404Spjd			    && MODES_TIEABLE_P (GET_MODE (x),
3493168404Spjd						GET_MODE (SUBREG_REG (to))))
3494168404Spjd#ifdef HAVE_cc0
3495248571Smm		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3496248571Smm#endif
3497248571Smm		      )
3498248571Smm		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3499248571Smm
3500168404Spjd#ifdef CLASS_CANNOT_CHANGE_MODE
3501168404Spjd		  if (code == SUBREG
3502248571Smm		      && GET_CODE (to) == REG
3503248571Smm		      && REGNO (to) < FIRST_PSEUDO_REGISTER
3504168404Spjd		      && (TEST_HARD_REG_BIT
3505168404Spjd			  (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
3506168404Spjd			   REGNO (to)))
3507168404Spjd		      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (to),
3508168404Spjd						     GET_MODE (x)))
3509168404Spjd		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
3510168404Spjd#endif
3511168404Spjd
3512168404Spjd		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3513168404Spjd		  n_occurrences++;
3514168404Spjd		}
3515168404Spjd	      else
3516168404Spjd		/* If we are in a SET_DEST, suppress most cases unless we
3517185029Spjd		   have gone inside a MEM, in which case we want to
3518168404Spjd		   simplify the address.  We assume here that things that
3519185029Spjd		   are actually part of the destination have their inner
3520168404Spjd		   parts in the first expression.  This is true for SUBREG,
3521168404Spjd		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3522168404Spjd		   things aside from REG and MEM that should appear in a
3523168404Spjd		   SET_DEST.  */
3524168404Spjd		new = subst (XEXP (x, i), from, to,
3525168404Spjd			     (((in_dest
3526168404Spjd				&& (code == SUBREG || code == STRICT_LOW_PART
3527168404Spjd				    || code == ZERO_EXTRACT))
3528168404Spjd			       || code == SET)
3529168404Spjd			      && i == 0), unique_copy);
3530168404Spjd
3531168404Spjd	      /* If we found that we will have to reject this combination,
3532168404Spjd		 indicate that by returning the CLOBBER ourselves, rather than
3533168404Spjd		 an expression containing it.  This will speed things up as
3534185029Spjd		 well as prevent accidents where two CLOBBERs are considered
3535185029Spjd		 to be equal, thus producing an incorrect simplification.  */
3536168404Spjd
3537168404Spjd	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3538219089Spjd		return new;
3539168404Spjd
3540168404Spjd	      if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG)
3541185029Spjd		{
3542185029Spjd		  enum machine_mode mode = GET_MODE (x);
3543185029Spjd		  x = simplify_subreg (mode, new,
3544185029Spjd				       GET_MODE (SUBREG_REG (x)),
3545185029Spjd				       SUBREG_BYTE (x));
3546185029Spjd		  if (! x)
3547185029Spjd		    x = gen_rtx_CLOBBER (mode, const0_rtx);
3548185029Spjd		}
3549185029Spjd	      else if (GET_CODE (new) == CONST_INT
3550185029Spjd		       && GET_CODE (x) == ZERO_EXTEND)
3551219089Spjd		{
3552185029Spjd		  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
3553168404Spjd						new, GET_MODE (XEXP (x, 0)));
3554185029Spjd		  if (! x)
3555185029Spjd		    abort ();
3556185029Spjd		}
3557168404Spjd	      else
3558168404Spjd		SUBST (XEXP (x, i), new);
3559168404Spjd	    }
3560168404Spjd	}
3561168404Spjd    }
3562168404Spjd
3563168404Spjd  /* Try to simplify X.  If the simplification changed the code, it is likely
3564168404Spjd     that further simplification will help, so loop, but limit the number
3565168404Spjd     of repetitions that will be performed.  */
3566168404Spjd
3567168404Spjd  for (i = 0; i < 4; i++)
3568168404Spjd    {
3569168404Spjd      /* If X is sufficiently simple, don't bother trying to do anything
3570168404Spjd	 with it.  */
3571185029Spjd      if (code != CONST_INT && code != REG && code != CLOBBER)
3572168404Spjd	x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
3573168404Spjd
3574185029Spjd      if (GET_CODE (x) == code)
3575185029Spjd	break;
3576185029Spjd
3577185029Spjd      code = GET_CODE (x);
3578168404Spjd
      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
   will be the last iteration even if an expression with a code different
   from X is returned; IN_DEST is nonzero if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (x, op0_mode, last, in_dest)
     rtx x;
     enum machine_mode op0_mode;
     int last;
     int in_dest;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  rtx reversed;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
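  /* For example, (plus (const_int 4) (reg R)) is canonicalized here to
     (plus (reg R) (const_int 4)).  */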
  if (GET_RTX_CLASS (code) == 'c'
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the sign
     extension and the addition.  Note that this is not the same as the
     original code, but overflow is undefined for signed values.  Also note
     that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
         (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
         (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend A) C2) C3),
     where C3 is C1*C2.

     We do this to simplify address expressions.  */

  if ((code == PLUS || code == MINUS || code == MULT)
      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
      && (temp = simplify_binary_operation (ASHIFTRT, mode,
					    XEXP (XEXP (XEXP (x, 0), 0), 1),
					    XEXP (XEXP (x, 0), 1))) != 0)
    {
      rtx new
	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
				INTVAL (XEXP (XEXP (x, 0), 1)));

      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
				  INTVAL (XEXP (XEXP (x, 0), 1)));

      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */
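  /* For instance, (plus (if_then_else C A B) K) can be rewritten as
     (if_then_else C (plus A K) (plus B K)), and each arm may then
     simplify further.  */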

  if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
	|| GET_RTX_CLASS (code) == '<')
       && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
	    && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
		      == 'o')))
	   || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
	       && ! (GET_CODE (XEXP (x, 1)) == SUBREG
		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
			 == 'o')))))
      || (GET_RTX_CLASS (code) == '1'
	  && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
	       && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		     && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
			 == 'o'))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
	  /* If everything is a comparison, what we have is highly unlikely
	     to be simpler, so don't use it.  */
	  && ! (GET_RTX_CLASS (code) == '<'
		&& (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<'
		    || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<')))
	{
	  rtx cop1 = const0_rtx;
	  enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

	  if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
	    return x;

	  /* Simplify the alternative arms; this may collapse the true and
	     false arms to store-flag values.  */
	  true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0);
	  false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0);

	  /* If true_rtx and false_rtx are not general_operands, an if_then_else
	     is unlikely to be simpler.  */
	  if (general_operand (true_rtx, VOIDmode)
	      && general_operand (false_rtx, VOIDmode))
	    {
	      /* Restarting if we generate a store-flag expression will cause
		 us to loop.  Just drop through in this case.  */

	      /* If the result values are STORE_FLAG_VALUE and zero, we can
		 just make the comparison operation.  */
	      if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
		x = gen_binary (cond_code, mode, cond, cop1);
	      else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
		       && reverse_condition (cond_code) != UNKNOWN)
		x = gen_binary (reverse_condition (cond_code),
				mode, cond, cop1);

	      /* Likewise, we can make the negate of a comparison operation
		 if the result values are - STORE_FLAG_VALUE and zero.  */
	      else if (GET_CODE (true_rtx) == CONST_INT
		       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
		       && false_rtx == const0_rtx)
		x = simplify_gen_unary (NEG, mode,
					gen_binary (cond_code, mode, cond,
						    cop1),
					mode);
	      else if (GET_CODE (false_rtx) == CONST_INT
		       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
		       && true_rtx == const0_rtx)
		x = simplify_gen_unary (NEG, mode,
					gen_binary (reverse_condition
						    (cond_code),
						    mode, cond, cop1),
					mode);
	      else
		return gen_rtx_IF_THEN_ELSE (mode,
					     gen_binary (cond_code, VOIDmode,
							 cond, cop1),
					     true_rtx, false_rtx);

	      code = GET_CODE (x);
	      op0_mode = VOIDmode;
	    }
	}
    }

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case '<':
      {
	enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
	if (cmp_mode == VOIDmode)
	  {
	    cmp_mode = GET_MODE (XEXP (x, 1));
	    if (cmp_mode == VOIDmode)
	      cmp_mode = op0_mode;
	  }
	temp = simplify_relational_operation (code, cmp_mode,
					      XEXP (x, 0), XEXP (x, 1));
      }
#ifdef FLOAT_STORE_FLAG_VALUE
      if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  if (temp == const0_rtx)
	    temp = CONST0_RTX (mode);
	  else
	    temp = immed_real_const_1 (FLOAT_STORE_FLAG_VALUE (mode), mode);
	}
#endif
      break;
    case 'c':
    case '2':
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case 'b':
    case '3':
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
					 XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
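  /* E.g., (plus (plus X (const_int 3)) (const_int 4)) reassociates to
     (plus X (const_int 7)) via the simplify_binary_operation call below.  */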
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
	  || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
	{
	  rtx other = XEXP (XEXP (x, 0), 0);
	  rtx inner_op0 = XEXP (XEXP (x, 0), 1);
	  rtx inner_op1 = XEXP (x, 1);
	  rtx inner;

	  /* Make sure we pass the constant operand if any as the second
	     one if this is a commutative operation.  */
	  if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
	    {
	      rtx tem = inner_op0;
	      inner_op0 = inner_op1;
	      inner_op1 = tem;
	    }
	  inner = simplify_binary_operation (code == MINUS ? PLUS
					     : code == DIV ? MULT
					     : code,
					     mode, inner_op0, inner_op1);

	  /* For commutative operations, try the other pair if that one
	     didn't simplify.  */
	  if (inner == 0 && GET_RTX_CLASS (code) == 'c')
	    {
	      other = XEXP (XEXP (x, 0), 1);
	      inner = simplify_binary_operation (code, mode,
						 XEXP (XEXP (x, 0), 0),
						 XEXP (x, 1));
	    }

	  if (inner)
	    return gen_binary (code, mode, other, inner);
	}
    }

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
	 address-recognizing predicates are called later.  */
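      /* E.g., (mem (plus R (ashift I (const_int 2)))) has its address
	 rewritten as (plus R (mult I (const_int 4))).  */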
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
	op0_mode = GET_MODE (SUBREG_REG (x));

      /* simplify_subreg can't use gen_lowpart_for_combine.  */
      if (CONSTANT_P (SUBREG_REG (x))
	  && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
	return gen_lowpart_for_combine (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
	rtx temp;
	temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
				SUBREG_BYTE (x));
	if (temp)
	  return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
	 of the address.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM
	  && (MEM_VOLATILE_P (SUBREG_REG (x))
	      || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
	return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
	 we might have been counting on using the fact that some bits were
	 zero.  We now do this in the SET.  */

      break;

    case NOT:
      /* (not (plus X -1)) can become (neg X).  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && XEXP (XEXP (x, 0), 1) == constm1_rtx)
	return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0));

      /* Similarly, (not (neg X)) is (plus X -1).  */
      if (GET_CODE (XEXP (x, 0)) == NEG)
	return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx);

      /* (not (xor X C)) for C constant is (xor X D) with D = ~C.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && (temp = simplify_unary_operation (NOT, mode,
					       XEXP (XEXP (x, 0), 1),
					       mode)) != 0)
	return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);

      /* (not (ashift 1 X)) is (rotate ~1 X).  We used to do this for operands
	 other than 1, but that is not valid.  We could do a similar
	 simplification for (not (lshiftrt C X)) where C is just the sign bit,
	 but this doesn't seem common enough to bother with.  */
      if (GET_CODE (XEXP (x, 0)) == ASHIFT
	  && XEXP (XEXP (x, 0), 0) == const1_rtx)
	return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode,
							 const1_rtx, mode),
			       XEXP (XEXP (x, 0), 1));

      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && subreg_lowpart_p (XEXP (x, 0))
	  && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
	  && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
	{
	  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));

	  x = gen_rtx_ROTATE (inner_mode,
			      simplify_gen_unary (NOT, inner_mode, const1_rtx,
						  inner_mode),
			      XEXP (SUBREG_REG (XEXP (x, 0)), 1));
	  return gen_lowpart_for_combine (mode, x);
	}

      /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
	 reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == -1
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
	  && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 0), 1))))
	return reversed;

      /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1
	 is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
	 perform the above simplification.  */

      if (STORE_FLAG_VALUE == -1
	  && GET_CODE (XEXP (x, 0)) == ASHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx);

      /* Apply De Morgan's laws to reduce number of patterns for machines
	 with negating logical insns (and-not, nand, etc.).  If result has
	 only one NOT, put it first, since that is how the patterns are
	 coded.  */
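      /* That is, (not (and X Y)) becomes (ior (not X) (not Y)) and
	 (not (ior X Y)) becomes (and (not X) (not Y)).  */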

      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
	{
	  rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
	  enum machine_mode op_mode;

	  op_mode = GET_MODE (in1);
	  in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);

	  op_mode = GET_MODE (in2);
	  if (op_mode == VOIDmode)
	    op_mode = mode;
	  in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);

	  if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
	    {
	      rtx tem = in2;
	      in2 = in1; in1 = tem;
	    }

	  return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
				 mode, in1, in2);
	}
      break;

    case NEG:
      /* (neg (plus X 1)) can become (not X).  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && XEXP (XEXP (x, 0), 1) == const1_rtx)
	return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0));

      /* Similarly, (neg (not X)) is (plus X 1).  */
      if (GET_CODE (XEXP (x, 0)) == NOT)
	return plus_constant (XEXP (XEXP (x, 0), 0), 1);

      /* (neg (minus X Y)) can become (minus Y X).  */
      if (GET_CODE (XEXP (x, 0)) == MINUS
	  && (! FLOAT_MODE_P (mode)
	      /* x-y != -(y-x) with IEEE floating point.  */
	      || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	      || flag_unsafe_math_optimizations))
	return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
			   XEXP (XEXP (x, 0), 0));

      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
	  && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
	return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);

      /* NEG commutes with ASHIFT since it is multiplication.  Only do this
	 if we can then eliminate the NEG (e.g.,
	 if the operand is a constant).  */

      if (GET_CODE (XEXP (x, 0)) == ASHIFT)
	{
	  temp = simplify_unary_operation (NEG, mode,
					   XEXP (XEXP (x, 0), 0), mode);
	  if (temp)
	    return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1));
	}

      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
	 replaced by (lshiftrt X C).  This will convert
	 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
	  && GET_CODE (XEXP (temp, 1)) == CONST_INT
	  && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
				     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
	 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
	 MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
	 (sign_extract X 1 Y).  But only do this if TEMP isn't a register
	 or a SUBREG of one since we'd be making the expression more
	 complex if it was just a register.  */

      if (GET_CODE (temp) != REG
	  && ! (GET_CODE (temp) == SUBREG
		&& GET_CODE (SUBREG_REG (temp)) == REG)
	  && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
	{
	  rtx temp1 = simplify_shift_const
	    (NULL_RTX, ASHIFTRT, mode,
	     simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
				   GET_MODE_BITSIZE (mode) - 1 - i),
	     GET_MODE_BITSIZE (mode) - 1 - i);

	  /* If all we did was surround TEMP with the two shifts, we
	     haven't improved anything, so don't use it.  Otherwise,
	     we are better off with TEMP1.  */
	  if (GET_CODE (temp1) != ASHIFTRT
	      || GET_CODE (XEXP (temp1, 0)) != ASHIFT
	      || XEXP (XEXP (temp1, 0), 0) != temp)
	    return temp1;
	}
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
	 because we don't know the real bitsize of the partial
	 integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
	SUBST (XEXP (x, 0),
	       force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
			      GET_MODE_MASK (mode), NULL_RTX, 0));

      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
	   || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
	return XEXP (XEXP (x, 0), 0);

      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
	 (OP:SI foo:SI) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
	   || GET_CODE (XEXP (x, 0)) == NEG)
	  && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
	      || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
	 (truncate:SI X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
	  && subreg_lowpart_p (XEXP (x, 0)))
	return SUBREG_REG (XEXP (x, 0));

      /* If we know that the value is already truncated, we can
         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
         is nonzero for the corresponding modes.  But don't do this
         for an (LSHIFTRT (MULT ...)) since this will cause problems
         with the umulXi3_highpart patterns.  */
      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
				 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	  && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	     >= GET_MODE_BITSIZE (mode) + 1
	  && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
		&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* A truncate of a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  This is like the previous test,
         but it works even if the comparison is done in a mode larger
         than HOST_BITS_PER_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      /* Similarly, a truncate of a register whose value is a
         comparison can be replaced with a subreg if STORE_FLAG_VALUE
         permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
	  && (temp = get_last_value (XEXP (x, 0)))
	  && GET_RTX_CLASS (GET_CODE (temp)) == '<')
	return gen_lowpart_for_combine (mode, XEXP (x, 0));

      break;

    case FLOAT_TRUNCATE:
      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
	return XEXP (XEXP (x, 0), 0);

      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
	 (OP:SF foo:SF) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
	   || GET_CODE (XEXP (x, 0)) == NEG)
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
	  && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
	return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
				   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
	 is (float_truncate:SF X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
	return SUBREG_REG (XEXP (x, 0));
      break;

#ifdef HAVE_cc0
    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
	 using cc0, in which case we want to leave it as a COMPARE
	 so we can distinguish it from a register-register-copy.  */
      if (XEXP (x, 1) == const0_rtx)
	return XEXP (x, 0);

      /* In IEEE floating point, x-0 is not the same as x.  */
      if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	   || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
	   || flag_unsafe_math_optimizations)
	  && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
	return XEXP (x, 0);
      break;
#endif

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
	 returning the inner CONST since CONST can be shared with a
	 REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
	 can add in an offset.  find_split_point will split this address up
	 again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
	  && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
	return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* If we have (plus (plus A const) B), associate it so that the
	 constant is outermost.  That's because that's the way indexed
	 addresses are supposed to appear.  This code used to check many
	 more cases, but they are now checked elsewhere.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
	return gen_binary (PLUS, mode,
			   gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
				       XEXP (x, 1)),
			   XEXP (XEXP (x, 0), 1));

      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
	 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
	 bit-field and can be replaced by either a sign_extend or a
	 sign_extract.  The `and' may be a zero_extend and the two
	 <c>, -<c> constants may be reversed.  */
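      /* For example, with an 8-bit field, ((x & 255) ^ 128) - 128
	 sign-extends the low byte of x into the wider mode.  */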
      if (GET_CODE (XEXP (x, 0)) == XOR
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
	  && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
	      || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
	       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
	       && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
		      == (unsigned int) i + 1))))
	return simplify_shift_const
	  (NULL_RTX, ASHIFTRT, mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
				 XEXP (XEXP (XEXP (x, 0), 0), 0),
				 GET_MODE_BITSIZE (mode) - (i + 1)),
	   GET_MODE_BITSIZE (mode) - (i + 1));

      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
	 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
	 is 1.  This produces better code than the alternative immediately
	 below.  */
      if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
	  && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
	      || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
	  && (reversed = reversed_comparison (XEXP (x, 0), mode,
					      XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 0), 1))))
	return
	  simplify_gen_unary (NEG, mode, reversed, mode);

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
	 can become (ashiftrt (ashift (xor x 1) C) C) where C is
	 the bitsize of the mode - 1.  This allows simplification of
	 "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
	  && GET_CODE (XEXP (x, 0)) != REG
	  && ! (GET_CODE (XEXP (x, 0)) == SUBREG
		&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
	  && nonzero_bits (XEXP (x, 0), mode) == 1)
	return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
	   simplify_shift_const (NULL_RTX, ASHIFT, mode,
				 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
				 GET_MODE_BITSIZE (mode) - 1),
	   GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
	 the addition into an IOR.  This will often be further simplified,
	 for example in cases like ((a & 1) + (a & 2)), which can
	 become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode)
	      & nonzero_bits (XEXP (x, 1), mode)) == 0)
	{
	  /* Try to simplify the expression further.  */
	  rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
	  temp = combine_simplify_rtx (tor, mode, last, in_dest);

	  /* If we could, great.  If not, do not go ahead with the IOR
	     replacement, since PLUS appears in many special purpose
	     address arithmetic instructions.  */
	  if (GET_CODE (temp) != CLOBBER && temp != tor)
	    return temp;
	}
      break;

    case MINUS:
      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
	 by reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == 1
	  && XEXP (x, 0) == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
	  && (reversed = reversed_comparison (XEXP (x, 1), mode,
					      XEXP (XEXP (x, 1), 0),
					      XEXP (XEXP (x, 1), 1))))
	return reversed;

      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
	 (and <foo> (const_int pow2-1))  */
      if (GET_CODE (XEXP (x, 1)) == AND
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
	  && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
	return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
				       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);

      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
	 integers.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
	return gen_binary (MINUS, mode,
			   gen_binary (MINUS, mode, XEXP (x, 0),
				       XEXP (XEXP (x, 1), 0)),
			   XEXP (XEXP (x, 1), 1));
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  This
	 occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
	{
	  x = apply_distributive_law
	    (gen_binary (PLUS, mode,
			 gen_binary (MULT, mode,
				     XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
			 gen_binary (MULT, mode,
				     XEXP (XEXP (x, 0), 1),
				     copy_rtx (XEXP (x, 1)))));

	  if (GET_CODE (x) != MULT)
	    return x;
	}
      /* Try to simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
	  && GET_CODE (XEXP (x, 0)) == DIV)
	{
	  rtx tem = simplify_binary_operation (MULT, mode,
					       XEXP (XEXP (x, 0), 0),
					       XEXP (x, 1));
	  if (tem)
	    return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
	}
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
	 its first operand is a shift.  */
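      /* E.g., (udiv (lshiftrt X 2) (const_int 4)) becomes
	 (lshiftrt X 4) once the two right shifts are merged.  */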
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
	  && (GET_CODE (XEXP (x, 0)) == ASHIFT
	      || GET_CODE (XEXP (x, 0)) == LSHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ASHIFTRT
	      || GET_CODE (XEXP (x, 0)) == ROTATE
	      || GET_CODE (XEXP (x, 0)) == ROTATERT))
	return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
	  || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
#ifdef HAVE_cc0
	      && XEXP (x, 0) != cc0_rtx
#endif
	      ))
	{
	  rtx op0 = XEXP (x, 0);
	  rtx op1 = XEXP (x, 1);
	  enum rtx_code new_code;

	  if (GET_CODE (op0) == COMPARE)
	    op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

	  /* Simplify our comparison, if possible.  */
	  new_code = simplify_comparison (code, &op0, &op1);

	  /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
	     if only the low-order bit is possibly nonzero in X (such as when
	     X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
	     (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
	     known to be either 0 or -1, NE becomes a NEG and EQ becomes
	     (plus X 1).

	     Remove any ZERO_EXTRACT we made when thinking this was a
	     comparison.  It may now be simpler to use, e.g., an AND.  If a
	     ZERO_EXTRACT is indeed appropriate, it will be placed back by
	     the call to make_compound_operation in the SET case.  */
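	  /* E.g., with STORE_FLAG_VALUE of 1, (ne (zero_extract X 1 Y) 0)
	     is just the one-bit value itself, so it is returned directly
	     by the first branch below.  */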

	  if (STORE_FLAG_VALUE == 1
	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
	      && op1 == const0_rtx
	      && mode == GET_MODE (op0)
	      && nonzero_bits (op0, mode) == 1)
	    return gen_lowpart_for_combine (mode,
					    expand_compound_operation (op0));

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, mode)
		       == GET_MODE_BITSIZE (mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NEG, mode,
					 gen_lowpart_for_combine (mode, op0),
					 mode);
	    }

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && nonzero_bits (op0, mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return gen_binary (XOR, mode,
				 gen_lowpart_for_combine (mode, op0),
				 const1_rtx);
	    }

	  else if (STORE_FLAG_VALUE == 1
		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, mode)
		       == GET_MODE_BITSIZE (mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
	    }

	  /* If STORE_FLAG_VALUE is -1, we have cases similar to
	     those above.  */
	  if (STORE_FLAG_VALUE == -1
	      && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
	      && op1 == const0_rtx
	      && (num_sign_bit_copies (op0, mode)
		  == GET_MODE_BITSIZE (mode)))
	    return gen_lowpart_for_combine (mode,
					    expand_compound_operation (op0));

	  else if (STORE_FLAG_VALUE == -1
		   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && nonzero_bits (op0, mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NEG, mode,
					 gen_lowpart_for_combine (mode, op0),
					 mode);
	    }

	  else if (STORE_FLAG_VALUE == -1
		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && (num_sign_bit_copies (op0, mode)
		       == GET_MODE_BITSIZE (mode)))
	    {
	      op0 = expand_compound_operation (op0);
	      return simplify_gen_unary (NOT, mode,
					 gen_lowpart_for_combine (mode, op0),
					 mode);
	    }

	  /* If X is 0/1, (eq X 0) is X-1.  */
	  else if (STORE_FLAG_VALUE == -1
		   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
		   && op1 == const0_rtx
		   && mode == GET_MODE (op0)
		   && nonzero_bits (op0, mode) == 1)
	    {
	      op0 = expand_compound_operation (op0);
	      return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
	    }

	  /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
	     one bit that might be nonzero, we can convert (ne x 0) to
	     (ashift x c) where C puts the bit in the sign bit.  Remove any
	     AND with STORE_FLAG_VALUE when we are done, since we are only
	     going to test the sign bit.  */
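	  /* E.g., in SImode with only bit 3 of X possibly nonzero,
	     (ne X 0) becomes (ashift X (const_int 28)).  */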
	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
		  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
	      && op1 == const0_rtx
	      && mode == GET_MODE (op0)
	      && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
	    {
	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
					expand_compound_operation (op0),
					GET_MODE_BITSIZE (mode) - 1 - i);
	      if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
		return XEXP (x, 0);
	      else
		return x;
	    }

	  /* If the code changed, return a whole new comparison.  */
	  if (new_code != code)
	    return gen_rtx_fmt_ee (new_code, mode, op0, op1);

	  /* Otherwise, keep this operation, but maybe change its operands.
	     This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
	  SUBST (XEXP (x, 0), op0);
	  SUBST (XEXP (x, 1), op1);
	}
      break;

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
	return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
    case XOR:
      return simplify_logical (x, last);

    case ABS:
      /* (abs (neg <foo>)) -> (abs <foo>) */
      if (GET_CODE (XEXP (x, 0)) == NEG)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));

      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
         do nothing.  */
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	break;

      /* If operand is something known to be positive, ignore the ABS.  */
      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
	  || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	       <= HOST_BITS_PER_WIDE_INT)
	      && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
		   & ((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
		  == 0)))
	return XEXP (x, 0);

      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
	return gen_rtx_NEG (mode, XEXP (x, 0));

      break;

    case FFS:
      /* (ffs (*_extend <X>)) = (ffs <X>) */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
	  || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
	SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return simplify_shift_const (x, code, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)));

#ifdef SHIFT_COUNT_TRUNCATED
      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
	SUBST (XEXP (x, 1),
	       force_to_mode (XEXP (x, 1), GET_MODE (x),
			      ((HOST_WIDE_INT) 1
			       << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
			      - 1,
			      NULL_RTX, 0));
#endif

      break;

    case VEC_SELECT:
      {
	rtx op0 = XEXP (x, 0);
	rtx op1 = XEXP (x, 1);
	int len;

	if (GET_CODE (op1) != PARALLEL)
	  abort ();
	len = XVECLEN (op1, 0);
	if (len == 1
	    && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
	    && GET_CODE (op0) == VEC_CONCAT)
	  {
	    int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));

	    /* Try to find the element in the VEC_CONCAT.  */
	    for (;;)
	      {
		if (GET_MODE (op0) == GET_MODE (x))
		  return op0;
		if (GET_CODE (op0) == VEC_CONCAT)
		  {
		    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
		    if (offset < op0_size)
		      op0 = XEXP (op0, 0);
		    else
		      {
			offset -= op0_size;
			op0 = XEXP (op0, 1);
		      }
		  }
		else
		  break;
	      }
	  }
      }

      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (x)
     rtx x;
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = GET_RTX_CLASS (true_code) == '<';
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;

  /* Simplify storing of the truth value.  */
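  /* E.g., (if_then_else (lt A B) const_true_rtx (const_int 0))
     is just (lt A B).  */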
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0),
					  XEXP (cond, 1))))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = combine_reversed_comparison_code (cond))
	  != UNKNOWN)
      && GET_CODE (XEXP (cond, 0)) == REG)
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
	{
	  swapped = 1, true_code = EQ, false_code = NE;
	  temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
	}

      /* If we are comparing against zero and the expression being tested has
	 only a single bit that might be nonzero, that is its value when it is
	 not equal to zero.  Similarly if it is known to be -1 or 0.  */
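      /* E.g., if FROM can only be zero or 8, then in the NE arm FROM
	 must have the value 8.  */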

      if (true_code == EQ && true_val == const0_rtx
	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
	false_code = EQ, false_val = GEN_INT (nzb);
      else if (true_code == EQ && true_val == const0_rtx
	       && (num_sign_bit_copies (from, GET_MODE (from))
		   == GET_MODE_BITSIZE (GET_MODE (from))))
	false_code = EQ, false_val = constm1_rtx;

      /* Now simplify an arm if we know the value of the register in the
	 branch and it is used in the arm.  Be careful due to the potential
	 of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
	true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
				      from, true_val),
		      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
	false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
				   from, false_val),
		       pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && combine_reversed_comparison_code (cond) != UNKNOWN
      && (true_rtx == pc_rtx
	  || (CONSTANT_P (true_rtx)
	      && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
	  || true_rtx == const0_rtx
	  || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
	  || (GET_CODE (true_rtx) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o')
	  || reg_mentioned_p (true_rtx, false_rtx)
	  || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0),
	     reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0),
				  XEXP (cond, 1)));

      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = GET_RTX_CLASS (true_code) == '<';
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
	   && (! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
	   && rtx_equal_p (XEXP (cond, 0), true_rtx)
	   && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */
4775
4776  if (GET_MODE_CLASS (mode) == MODE_INT
4777      && GET_CODE (false_rtx) == NEG
4778      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
4779      && comparison_p
4780      && rtx_equal_p (true_rtx, XEXP (cond, 0))
4781      && ! side_effects_p (true_rtx))
4782    switch (true_code)
4783      {
4784      case GT:
4785      case GE:
4786	return simplify_gen_unary (ABS, mode, true_rtx, mode);
4787      case LT:
4788      case LE:
4789	return
4790	  simplify_gen_unary (NEG, mode,
4791			      simplify_gen_unary (ABS, mode, true_rtx, mode),
4792			      mode);
4793      default:
4794	break;
4795      }
4796
4797  /* Look for MIN or MAX.  */
4798
4799  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4800      && comparison_p
4801      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4802      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4803      && ! side_effects_p (cond))
4804    switch (true_code)
4805      {
4806      case GE:
4807      case GT:
4808	return gen_binary (SMAX, mode, true_rtx, false_rtx);
4809      case LE:
4810      case LT:
4811	return gen_binary (SMIN, mode, true_rtx, false_rtx);
4812      case GEU:
4813      case GTU:
4814	return gen_binary (UMAX, mode, true_rtx, false_rtx);
4815      case LEU:
4816      case LTU:
4817	return gen_binary (UMIN, mode, true_rtx, false_rtx);
4818      default:
4819	break;
4820      }
4821
  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
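  /* For instance, with STORE_FLAG_VALUE == 1, (if_then_else COND
     (plus Z (const_int 4)) Z) becomes (plus Z (mult COND (const_int 4))):
     the PLUS adds 4 when COND is 1 and adds the identity value 0 when
     COND is 0.  */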

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = NIL, extend_op = NIL;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
	   || GET_CODE (t) == ASHIFT
	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
	  && rtx_equal_p (XEXP (t, 0), f))
	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
	 would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
		|| GET_CODE (t) == XOR)
	       && rtx_equal_p (XEXP (t, 1), f))
	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}

      if (z)
	{
	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
			pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (MULT, m, temp,
			     gen_binary (MULT, m, c1, const_true_rtx));
	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);

	  if (extend_op != NIL)
	    temp = simplify_gen_unary (extend_op, mode, temp, m);

	  return temp;
	}
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
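  /* For example, if A is known to be 0 or 1, (if_then_else (ne A 0)
     (const_int 8) (const_int 0)) is A shifted left by 3, i.e.
     (ashift A (const_int 3)).  */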

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
	   && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
	       == GET_MODE_BITSIZE (mode))
	      && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);

  return x;
}

/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (x)
     rtx x;
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
       || dest == cc0_rtx
#endif
       )
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
	op0 = src, op1 = const0_rtx;

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (old_code, &op0, &op1);

#ifdef EXTRA_CC_MODES
      /* If this machine has CC modes other than CCmode, check to see if we
	 need to use a different CC mode here.  */
      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
#endif /* EXTRA_CC_MODES */

#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
      /* If the mode changed, we have to change SET_DEST, the mode in the
	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
	 a hard register, just build new versions with the proper mode.  If it
	 is a pseudo, we lose unless it is the only time we set the pseudo, in
	 which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
	{
	  unsigned int regno = REGNO (dest);
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (x), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      other_changed = 1;

	      dest = new_dest;
	    }
	}
#endif

      /* If the code changed, we have to build a new comparison in
	 undobuf.other_insn.  */
      if (new_code != old_code)
	{
	  unsigned HOST_WIDE_INT mask;

	  SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
					  dest, const0_rtx));

	  /* If the only change we made was to change an EQ into an NE or
	     vice versa, OP0 has only one bit that might be nonzero, and OP1
	     is zero, check if changing the user of the condition code will
	     produce a valid insn.  If it won't, we can keep the original code
	     in that insn by surrounding our operation with an XOR.  */
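	  /* For instance, if OP0 is known to be either 0 or 4, then
	     (eq OP0 0) is equivalent to (ne (xor OP0 4) 0), so flipping
	     EQ to NE in *CC_USE can be undone by XORing OP0 with 4.  */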

	  if (((old_code == NE && new_code == EQ)
	       || (old_code == EQ && new_code == NE))
	      && ! other_changed && op1 == const0_rtx
	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
	    {
	      rtx pat = PATTERN (other_insn), note = 0;

	      if ((recog_for_combine (&pat, other_insn, &note) < 0
		   && ! check_asm_operands (pat)))
		{
		  PUT_CODE (*cc_use, old_code);
		  other_insn = 0;

		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
		}
	    }

	  other_changed = 1;
	}

      if (other_changed)
	undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
	 needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
	{
	  SUBST (SET_SRC (x), op0);
	  src = op0;
	}
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
	 correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
	{
	  SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
	  src = SET_SRC (x);
	}
      else
	{
	  /* Otherwise, update the COMPARE if needed.  */
	  SUBST (XEXP (src, 0), op0);
	  SUBST (XEXP (src, 1), op1);
	}
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
	 compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */
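  /* For example, on a 64-bit WORD_REGISTER_OPERATIONS target where SImode
     and DImode each occupy one word, (set (reg:SI X) (subreg:SI
     (plus:DI A B) 0)) can become (set (subreg:DI (reg:SI X) 0)
     (plus:DI A B)), letting the PLUS be done in its natural mode.  */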

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
	   / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CLASS_CANNOT_CHANGE_MODE
      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
	    && (TEST_HARD_REG_BIT
		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
		 REGNO (dest)))
	    && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (src),
					   GET_MODE (SUBREG_REG (src))))
#endif
      && (GET_CODE (dest) == REG
	  || (GET_CODE (dest) == SUBREG
	      && GET_CODE (SUBREG_REG (dest)) == REG)))
    {
      SUBST (SET_DEST (x),
	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
				      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && GET_CODE (SUBREG_REG (src)) == MEM)
    {
      SUBST (SET_SRC (x),
	     gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
		      GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
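  /* The rewrite rests on the identity: when A is known to be 0 or -1,
     (if_then_else (ne A 0) T F) is (ior (and A T) (and (not A) F)),
     since A acts as an all-ones or all-zeros mask.  */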

  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
			       GET_MODE (XEXP (XEXP (src, 0), 0)))
	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
		      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
		       ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
	  && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
	term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
	       && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
	term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
	       && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
	term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
	       && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
	term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;

      term2 = gen_binary (AND, GET_MODE (src),
			  XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = gen_binary (AND, GET_MODE (src),
			  simplify_gen_unary (NOT, GET_MODE (src),
					      XEXP (XEXP (src, 0), 0),
					      GET_MODE (src)),
			  false_rtx);

      SUBST (SET_SRC (x),
	     gen_binary (IOR, GET_MODE (src),
			 gen_binary (IOR, GET_MODE (src), term1, term2),
			 term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);
  rtx reversed;

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
	 insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
			op1);

      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
			op1);

      /* Similarly for (~(A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* We can call simplify_and_const_int only if we don't lose
	 any (sign) bits when converting INTVAL (op1) to
	 "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      || INTVAL (op1) > 0))
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

	  /* If we have (ior (and X C1) C2) and the next restart would be
	     the last, simplify this by making C1 as small as possible
	     and then exit.  */
	  if (last
	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (op1) == CONST_INT)
	    return gen_binary (IOR, mode,
			       gen_binary (AND, mode, XEXP (op0, 0),
					   GEN_INT (INTVAL (XEXP (op0, 1))
						    & ~INTVAL (op1))), op1);

	  if (GET_CODE (x) != AND)
	    return x;

	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	}

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* In the following group of tests (and those in case IOR below),
	 we start with some combination of logical operations and apply
	 the distributive law followed by the inverse distributive law.
	 Most of the time, this results in no change.  However, if some of
	 the operands are the same or inverses of each other, simplifications
	 will result.

	 For example, (and (ior A B) (not B)) can occur as the result of
	 expanding a bit field assignment.  When we apply the distributive
	 law to this, we get (ior (and A (not B)) (and B (not B))),
	 which then simplifies to (and A (not B)).

	 If we have (and (ior A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  x = apply_distributive_law
	    (gen_binary (GET_CODE (op0), mode,
			 gen_binary (AND, mode, XEXP (op0, 0), op1),
			 gen_binary (AND, mode, XEXP (op0, 1),
				     copy_rtx (op1))));
	  if (GET_CODE (x) != AND)
	    return x;
	}

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (GET_CODE (op1), mode,
		       gen_binary (AND, mode, XEXP (op1, 0), op0),
		       gen_binary (AND, mode, XEXP (op1, 1),
				   copy_rtx (op0))));

      /* Similarly, taking advantage of the fact that
	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
		       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
				   XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
		       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)),
				   XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
	return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
			 gen_binary (IOR, mode, XEXP (op0, 1),
				     copy_rtx (op1))));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      if (GET_CODE (op1) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
			 gen_binary (IOR, mode, XEXP (op1, 1),
				     copy_rtx (op0))));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
	 mode size to (rotate A CX).  */
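      /* For example, in SImode, (ior (ashift A (const_int 3))
	 (lshiftrt A (const_int 29))) becomes (rotate A (const_int 3)).  */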

      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
	      == GET_MODE_BITSIZE (mode)))
	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
			       (GET_CODE (op0) == ASHIFT
				? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
	 a (sign_extend (plus ...)).  If so, and if OP1 is a CONST_INT and
	 the PLUS does not affect any of the bits in OP1, it can really be
	 done as a PLUS and we can associate.  We do this by seeing if OP1
	 can be safely shifted left C bits.  */
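      /* That is, (ior (ashiftrt (plus A C1) C) OP1) can become
	 (ashiftrt (plus A (C1 | (OP1 << C))) C) provided the shifted
	 OP1 loses no bits and overlaps no nonzero bits of the PLUS.  */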
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int count = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT mask = INTVAL (op1) << count;

	  if (mask >> count == INTVAL (op1)
	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
	    {
	      SUBST (XEXP (XEXP (op0, 0), 1),
		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
	      return op0;
	    }
	}
      break;

    case XOR:
      /* If we are XORing two things that have no bits in common,
	 convert them into an IOR.  This helps to detect rotation encoded
	 using those methods and possibly other simplifications.  */
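      /* This is safe because when OP0 & OP1 == 0, each bit position has
	 at most one operand contributing a 1, so XOR and IOR agree on
	 every bit.  */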

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode)
	      & nonzero_bits (op1, mode)) == 0)
	return (gen_binary (IOR, mode, op0, op1));

      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
	 (NOT y).  */
      {
	int num_negated = 0;

	if (GET_CODE (op0) == NOT)
	  num_negated++, op0 = XEXP (op0, 0);
	if (GET_CODE (op1) == NOT)
	  num_negated++, op1 = XEXP (op1, 0);

	if (num_negated == 2)
	  {
	    SUBST (XEXP (x, 0), op0);
	    SUBST (XEXP (x, 1), op1);
	  }
	else if (num_negated == 1)
	  return
	    simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1),
				mode);
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
	 correspond to a machine insn or result in further simplifications
	 if B is a constant.  */

      if (GET_CODE (op0) == AND
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode),
			   op1);

      else if (GET_CODE (op0) == AND
	       && rtx_equal_p (XEXP (op0, 0), op1)
	       && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode),
			   op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
	 comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
					      XEXP (op0, 1))))
	return reversed;

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification if STORE_FLAG_VALUE is 1.  */
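      /* In SImode that is (xor (lshiftrt FOO 31) 1): the shift isolates
	 the sign bit, so the XOR with 1 yields (ge FOO 0).  */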

      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
	  && op1 == const_true_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0),
					      XEXP (op0, 1))))
	return reversed;

      break;

    default:
      abort ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
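/* For instance, (sign_extend:SI (reg:QI R)) expands to
   (ashiftrt:SI (ashift:SI (subreg:SI (reg:QI R) 0) (const_int 24))
		(const_int 24)),
   while (zero_extend:SI (reg:QI R)) becomes
   (and:SI (subreg:SI (reg:QI R) 0) (const_int 255)).  */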

static rtx
expand_compound_operation (x)
     rtx x;
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
	 it depends on implicitly extending the value.
	 Since we don't know the right way to extend it,
	 we can't tell whether the implicit way is right.

	 Even for a mode that is no wider than a const_int,
	 we can't win, because we need to sign extend one of its bits through
	 the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
	 reloaded.  If not for that, MEMs would very rarely be safe.

	 Reject MODEs bigger than a word, because we might not be able
	 to reference a two-register group starting with an arbitrary register
	 (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
	return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
	 is if it is an ASM_OPERANDS), we can't do anything since we don't
	 know how much masking to do.  */
      if (len == 0)
	return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;
    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || GET_CODE (XEXP (x, 2)) != CONST_INT
	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
	return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* If this goes outside the object being extracted, replace the object
	 with a (use (mem ...)) construct that only combine understands
	 and is used only for this purpose.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));

      if (BITS_BIG_ENDIAN)
	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to a cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	       & ~(((unsigned HOST_WIDE_INT)
		    GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
		   >> 1))
	      == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      return expand_compound_operation (temp);
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
	 know that the last value didn't have any inappropriate bits
	 set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
	 is a comparison and STORE_FLAG_VALUE permits.  This is like
	 the first case, but it works even when GET_MODE (x) is larger
	 than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return SUBREG_REG (XEXP (x, 0));
    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */
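  /* For example, a SIGN_EXTRACT of LEN 8 at POS 8 from a 32-bit value
     becomes (ashiftrt (ashift X (const_int 16)) (const_int 24)):
     shift left by 32 - 8 - 8, then arithmetic shift right by 32 - 8.  */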

  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
				GET_MODE (x),
				simplify_shift_const (NULL_RTX, ASHIFT,
						      GET_MODE (x),
						      XEXP (x, 0),
						      modewidth - pos - len),
				modewidth - len);

  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
				  simplify_shift_const (NULL_RTX, LSHIFTRT,
							GET_MODE (x),
							XEXP (x, 0), pos),
				  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}

/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
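/* For instance, (set (strict_low_part (subreg:QI (reg:SI R) 0)) SRC)
   can be rewritten as R = (R & ~255) | (SRC & 255), expressed with
   AND, NOT, ASHIFT, and IOR operations.  */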

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same number of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       /* We need SUBREGs to compute nonzero_bits properly.  */
	       && nonzero_sign_valid
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  continue;
	}
      else
	break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-integral modes.  */
      if (! INTEGRAL_MODE_P (compute_mode))
	{
	  enum machine_mode imode;

	  /* Something is probably seriously wrong if this matches.  */
	  if (! FLOAT_MODE_P (compute_mode))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
	  if (imode == BLKmode)
	    break;

	  compute_mode = imode;
	  inner = gen_lowpart_for_combine (imode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
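      /* The SET built below encodes the identity
	 INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS),
	 which clears the field and then ORs in the new value.  */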
      x = gen_rtx_SET
	(VOIDmode, copy_rtx (inner),
	 gen_binary (IOR, compute_mode,
		     gen_binary (AND, compute_mode,
				 simplify_gen_unary (NOT, compute_mode,
						     gen_binary (ASHIFT,
								 compute_mode,
								 mask, pos),
						     compute_mode),
				 inner),
		     gen_binary (ASHIFT, compute_mode,
				 gen_binary (AND, compute_mode,
					     gen_lowpart_for_combine
					     (compute_mode, SET_SRC (x)),
					     mask),
				 pos)));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */
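/* For example, a request for 8 unsigned bits at constant position 8 of
   a 32-bit register may yield (zero_extract:SI (reg:SI R) (const_int 8)
   (const_int 8)), or a narrower direct reference when one suffices.  */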

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     HOST_WIDE_INT pos;
     rtx pos_rtx;
     unsigned HOST_WIDE_INT len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && have_insn_for (STRICT_LOW_PART, tmode))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  HOST_WIDE_INT offset;

	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = adjust_address_nv (inner, tmode, offset);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    {
	      HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

	      if (WORDS_BIG_ENDIAN
		  && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
		final_word = ((GET_MODE_SIZE (inner_mode)
			       - GET_MODE_SIZE (tmode))
			      / UNITS_PER_WORD) - final_word;

	      final_word *= UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN
		  && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
		final_word += (GET_MODE_SIZE (inner_mode)
			       - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

	      /* Avoid creating invalid subregs, for example when
		 simplifying (x>>32)&255.  */
	      if (final_word >= GET_MODE_SIZE (inner_mode))
		return NULL_RTX;

	      new = gen_rtx_SUBREG (tmode, inner, final_word);
	    }
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));

      if (mode == tmode)
	return new;

      if (GET_CODE (new) == CONST_INT)
	return GEN_INT (trunc_int_for_mode (INTVAL (new), mode));

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of sign
	 and zero extension, which are equivalent in these cases.  */
6054      if (flag_expensive_optimizations
6055	  && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
6056	      && ((nonzero_bits (new, tmode)
6057		   & ~(((unsigned HOST_WIDE_INT)
6058			GET_MODE_MASK (tmode))
6059		       >> 1))
6060		  == 0)))
6061	{
6062	  rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
6063	  rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
6064
6065	  /* Prefer ZERO_EXTENSION, since it gives more information to
6066	     backends.  */
6067	  if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
6068	    return temp;
6069	  return temp1;
6070	}
6071
6072      /* Otherwise, sign- or zero-extend unless we already are in the
6073	 proper mode.  */
6074
6075      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
6076			     mode, new));
6077    }
6078
6079  /* Unless this is a COMPARE or we have a funny memory reference,
6080     don't do anything with zero-extending field extracts starting at
6081     the low-order bit since they are simple AND operations.  */
6082  if (pos_rtx == 0 && pos == 0 && ! in_dest
6083      && ! in_compare && ! spans_byte && unsignedp)
6084    return 0;

  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte && GET_CODE (inner) == MEM
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }
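  /* (For instance, a constant POS of 0 with LEN 8 and a 32-bit WIDTH
     becomes position 32 - 8 - 0 = 24 in BITS_BIG_ENDIAN numbering.)  */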

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? ~(unsigned HOST_WIDE_INT) 0
			     : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
				<< orig_pos),
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
	 bit is not set, convert the extraction to the cheaper of
	 SIGN_EXTEND and ZERO_EXTEND, which are equivalent in these
	 cases.  */
      if (flag_expensive_optimizations
	  && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
	      && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
		   & ~(((unsigned HOST_WIDE_INT)
			GET_MODE_MASK (GET_MODE (pos_rtx)))
		       >> 1))
		  == 0)))
	{
	  rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

	  /* Prefer ZERO_EXTEND, since it gives more information to
	     backends.  */
	  if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
	    temp = temp1;
	}
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
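/* (A sketch of the idea: with COUNT == 3, X = (plus (ashift Y 3)
   (const_int 8)) yields (plus Y (const_int 1)), since shifting that
   result left by 3 recreates the original X.)  */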

static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_binary (code, mode, tem,
			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
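/* (For example, (and (lshiftrt X 4) (const_int 127)) becomes
   (zero_extract X 7 4): the mask 127 is one less than a power of two,
   so the shift-and-mask pair is really a 7-bit extract at position 4.)  */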

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
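      /* (E.g., inside a MEM, (ashift X (const_int 2)) becomes
	 (mult X (const_int 4)).)  */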
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_MULT (mode, new,
			      GEN_INT ((HOST_WIDE_INT) 1
				       << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
					     XEXP (x, 1)),
				gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
					     XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && !have_insn_for (LSHIFTRT, mode)
	       && have_insn_for (ASHIFTRT, mode)
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_ASHIFTRT (mode,
				     make_compound_operation
				     (XEXP (XEXP (x, 0), 0), next_code),
				     XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
	  && ! have_insn_for (LSHIFTRT, mode)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_ASHIFTRT (mode,
				  make_compound_operation (XEXP (x, 0),
							   next_code),
				  XEXP (x, 1));
	  break;
	}

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
				     NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}

      /* If this is a paradoxical subreg, and the new code is a sign or
	 zero extension, omit the subreg and widen the extension.  If it
	 is a regular subreg, we can still get rid of the subreg by not
	 widening so much, or in fact removing the extension entirely.  */
      if ((GET_CODE (tem) == SIGN_EXTEND
	   || GET_CODE (tem) == ZERO_EXTEND)
	  && subreg_lowpart_p (x))
	{
	  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
	      || (GET_MODE_SIZE (mode) >
		  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
	    tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0));
	  else
	    tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
	  return tem;
	}
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}

/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
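/* (For example, M = 0x70, three 1-bits starting at bit 4, gives 4 with
   *PLEN set to 3; M = 0x50 has a hole in it and fails, returning -1.)  */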

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     unsigned HOST_WIDE_INT *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len;

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    return -1;

  *plen = len;
  return pos;
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
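/* (As a simple illustration, forcing (const_int 0x1234) with a MASK of
   0xff yields (const_int 0x34), and an (and X (const_int 255)) seen
   under MASK 255 can shed its now-redundant AND.)  */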

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && have_insn_for (code, mode))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
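  /* (E.g., a MASK of 0x14 has its highest set bit at position 4, so
     FULLER_MASK becomes 0x1f.)  */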
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
		      - 1));
  else
    fuller_mask = ~(HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    return gen_int_mode (INTVAL (x) & mask, mode);

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
	  && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
		  == (HOST_WIDE_INT) mask))
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask
	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */
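      /* (For instance, if FOO is known to be 8-byte aligned, (and (plus
	 FOO (const_int 1)) (const_int -8)) masks C1 to 0, so the PLUS
	 disappears, leaving just FOO.)  */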

      {
	unsigned int width = GET_MODE_BITSIZE (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	  smask |= (HOST_WIDE_INT) -1 << width;

	if (GET_CODE (XEXP (x, 1)) == CONST_INT
	    && exact_log2 (- smask) >= 0
	    && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
	    && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
	  return force_to_mode (plus_constant (XEXP (x, 0),
					       (INTVAL (XEXP (x, 1)) & smask)),
				mode, smask, reg, next_select);
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
	 in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
					& -INTVAL (XEXP (x, 0))))
	      > mask))
	{
	  x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
				  GET_MODE (x));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* Similarly, if C contains every bit in the fuller_mask, then we may
	 replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
	      == INTVAL (XEXP (x, 0))))
	{
	  x = simplify_gen_unary (NOT, GET_MODE (x),
				  XEXP (x, 1), GET_MODE (x));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
			     XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
			  XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);
	  unsigned HOST_WIDE_INT inner_mask;

	  /* Select the mask of the bits we need for the shift operand.  */
	  inner_mask = mask << INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and INNER_MASK is no wider than the
	     width of OP_MODE.  */
	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
	      || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  /* The shift puts one of the sign bit copies in the least significant
	     bit.  */
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  /* Number of bits left after the shift must be more than the mask
	     needs.  */
	  && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)))
	  /* Must be more sign bit copies than the mask needs.  */
	  && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
				 - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((unsigned HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i = -1;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~(HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x));
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  if ((mask & ~nonzero) == 0
	      || (i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 i < 0 ? INTVAL (XEXP (x, 1))
		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, reg, next_select);
	    }
	}

      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), reg, next_select));
	}
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
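      /* (For instance, with STORE_FLAG_VALUE == 1 and FOO known to be
	 either 0 or 1, (and (ne FOO 0) (const_int 1)) is just FOO.)  */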
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}

/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */
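/* (For example, for X = (ne A (const_int 0)) this returns A, with
   *PTRUE set to const_true_rtx and *PFALSE set to const0_rtx.)  */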

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (GET_RTX_CLASS (code) == '1'
	   && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
				    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
	   || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
	{
	  /* If if_then_else_cond returned zero, then true/false are the
	     same rtl.  We must copy one of them to prevent invalid rtl
	     sharing.  */
	  if (cond0 == 0)
	    true0 = copy_rtx (true0);
	  else if (cond1 == 0)
	    true1 = copy_rtx (true1);

	  *ptrue = gen_binary (code, mode, true0, true1);
	  *pfalse = gen_binary (code, mode, false0, false1);
	  return cond0 ? cond0 : cond1;
	}

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
	 operands is zero when the other is non-zero, and vice-versa,
	 and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
	      || code == UMAX)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 1);
	  rtx op1 = XEXP (XEXP (x, 1), 1);

	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == combine_reversed_comparison_code (cond1))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
	      *pfalse = gen_binary (MULT, mode,
				    (code == MINUS
				     ? simplify_gen_unary (NEG, mode, op1,
							   mode)
				     : op1),
				    const_true_rtx);
	      return cond0;
	    }
	}

      /* Similarly for MULT, AND and UMIN, except that for these the result
	 is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == MULT || code == AND || code == UMIN)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1)
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == combine_reversed_comparison_code (cond1))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = *pfalse = const0_rtx;
	      return cond0;
	    }
	}
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
	 canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
	return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
	{
	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
	  return XEXP (cond0, 0);
	}
      else
	return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
  else if (code == SUBREG
	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
					       &true0, &false0)))
    {
      *ptrue = simplify_gen_subreg (mode, true0,
				    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      *pfalse = simplify_gen_subreg (mode, false0,
				     GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusion
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
	   || (mode != VOIDmode
	       && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (mode != VOIDmode
	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	   && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (trunc_int_for_mode (nz, mode)), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}

/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */
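/* (For instance, if COND is GE and VAL is (const_int 0), REG is known
   to be nonnegative, so an X of (abs REG) simplifies to just REG.)  */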

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  const char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  /* If either operand of the condition is a floating point value,
     then we have to avoid collapsing an EQ comparison.  */
  if (cond == EQ
      && rtx_equal_p (x, reg)
      && ! FLOAT_MODE_P (GET_MODE (x))
      && ! FLOAT_MODE_P (GET_MODE (val)))
    return val;

  if (cond == UNEQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
	return XEXP (x, 0);
      case LT:  case LE:
	return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
				   XEXP (x, 0),
				   GET_MODE (XEXP (x, 0)));
      default:
	break;
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
	cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
	{
	  if (GET_RTX_CLASS (code) == '<')
	    {
	      if (comparison_dominates_p (cond, code))
		return const_true_rtx;

	      code = combine_reversed_comparison_code (x);
	      if (code != UNKNOWN
		  && comparison_dominates_p (cond, code))
		return const0_rtx;
	      else
		return x;
	    }
	  else if (code == SMAX || code == SMIN
		   || code == UMIN || code == UMAX)
	    {
	      int unsignedp = (code == UMIN || code == UMAX);

	      /* Do not reverse the condition when it is NE or EQ.
		 This is because we cannot conclude anything about
		 the value of 'SMAX (x, y)' when x is not equal to y,
		 but we can when x equals y.  */
	      if ((code == SMAX || code == UMAX)
		  && ! (cond == EQ || cond == NE))
		cond = reverse_condition (cond);

	      switch (cond)
		{
		case GE:   case GT:
		  return unsignedp ? x : XEXP (x, 1);
		case LE:   case LT:
		  return unsignedp ? x : XEXP (x, 0);
		case GEU:  case GTU:
		  return unsignedp ? XEXP (x, 1) : x;
		case LEU:  case LTU:
		  return unsignedp ? XEXP (x, 0) : x;
		default:
		  break;
		}
	    }
	}
    }
  else if (code == SUBREG)
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);

      if (SUBREG_REG (x) != r)
	{
	  /* We must simplify subreg here, before we lose track of the
	     original inner_mode.  */
	  new = simplify_subreg (GET_MODE (x), r,
				 inner_mode, SUBREG_BYTE (x));
	  if (new)
	    return new;
	  else
	    SUBST (SUBREG_REG (x), r);
	}

      return x;
    }
  /* We don't have to handle SIGN_EXTEND here, because even in the
     case of replacing something with a modeless CONST_INT, a
     CONST_INT is already (supposed to be) a valid sign extension for
     its narrower mode, which implies it's already properly
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
     story is different.  */
  else if (code == ZERO_EXTEND)
    {
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);

      if (XEXP (x, 0) != r)
	{
	  /* We must simplify the zero_extend here, before we lose
             track of the original inner_mode.  */
	  new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					  r, inner_mode);
	  if (new)
	    return new;
	  else
	    SUBST (XEXP (x, 0), r);
	}

      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
						cond, reg, val));
    }

  return x;
}

/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (x, y)
     rtx x;
     rtx y;
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
      && GET_CODE (SUBREG_REG (y)) == MEM
      && rtx_equal_p (SUBREG_REG (y),
		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == MEM
      && rtx_equal_p (SUBREG_REG (x),
		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination and in the other case, we'll import a register
     into this insn that might have already been dead.  So fail if none
     of the above cases are true.  */
  return 0;
}

/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */
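/* (For example, (set DEST (ior (ashift (const_int 1) POS) DEST)) sets
   a single bit, and is rewritten as
   (set (zero_extract DEST 1 POS) (const_int 1)).)  */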
7579
7580static rtx
7581make_field_assignment (x)
7582     rtx x;
7583{
7584  rtx dest = SET_DEST (x);
7585  rtx src = SET_SRC (x);
7586  rtx assign;
7587  rtx rhs, lhs;
7588  HOST_WIDE_INT c1;
7589  HOST_WIDE_INT pos;
7590  unsigned HOST_WIDE_INT len;
7591  rtx other;
7592  enum machine_mode mode;
7593
7594  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7595     a clear of a one-bit field.  We will have changed it to
7596     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7597     for a SUBREG.  */
7598
7599  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7600      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7601      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7602      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7603    {
7604      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7605				1, 1, 1, 0);
7606      if (assign != 0)
7607	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7608      return x;
7609    }
7610
7611  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7612	   && subreg_lowpart_p (XEXP (src, 0))
7613	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7614	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7615	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7616	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7617	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7618    {
7619      assign = make_extraction (VOIDmode, dest, 0,
7620				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7621				1, 1, 1, 0);
7622      if (assign != 0)
7623	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7624      return x;
7625    }
7626
7627  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7628     one-bit field.  */
7629  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7630	   && XEXP (XEXP (src, 0), 0) == const1_rtx
7631	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7632    {
7633      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7634				1, 1, 1, 0);
7635      if (assign != 0)
7636	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7637      return x;
7638    }
7639
7640  /* The other case we handle is assignments into a constant-position
7641     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
7642     a mask that has all one bits except for a group of zero bits and
7643     OTHER is known to have zeros where C1 has ones, this is such an
7644     assignment.  Compute the position and length from C1.  Shift OTHER
7645     to the appropriate position, force it to the required mode, and
7646     make the extraction.  Check for the AND in both operands.  */
7647
7648  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7649    return x;
7650
7651  rhs = expand_compound_operation (XEXP (src, 0));
7652  lhs = expand_compound_operation (XEXP (src, 1));
7653
7654  if (GET_CODE (rhs) == AND
7655      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7656      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7657    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7658  else if (GET_CODE (lhs) == AND
7659	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7660	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7661    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7662  else
7663    return x;
7664
7665  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7666  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7667      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7668      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7669    return x;
7670
7671  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7672  if (assign == 0)
7673    return x;
7674
7675  /* The mode to use for the source is the mode of the assignment, or of
7676     what is inside a possible STRICT_LOW_PART.  */
7677  mode = (GET_CODE (assign) == STRICT_LOW_PART
7678	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7679
7680  /* Shift OTHER right POS places and make it the source, restricting it
7681     to the proper length and mode.  */
7682
7683  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7684					     GET_MODE (src), other, pos),
7685		       mode,
7686		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7687		       ? ~(unsigned HOST_WIDE_INT) 0
7688		       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
7689		       dest, 0);
7690
7691  return gen_rtx_SET (VOIDmode, assign, src);
7692}
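
#if 0
/* Illustrative sketch, not part of this pass: how a constant C1 whose zero
   bits form one contiguous group, as in the (ior (and DEST C1) OTHER) case
   handled above, decomposes into a bit-field position and length.  This is
   plain host arithmetic with a hypothetical name; the pass itself uses
   get_pos_from_mask.  */
#include <stdint.h>

static int
field_pos_from_mask (uint32_t c1, int *plen)
{
  uint32_t zeros = ~c1;		/* the candidate field is where C1 is 0 */
  int pos = 0, len = 0;

  if (zeros == 0)
    return -1;			/* no zero bits, hence no field */

  while ((zeros & 1) == 0)	/* skip the low-order one bits of C1 */
    zeros >>= 1, pos++;
  while (zeros & 1)		/* measure the run of zero bits of C1 */
    zeros >>= 1, len++;

  if (zeros != 0)
    return -1;			/* a second group of zeros: not a field */

  *plen = len;
  return pos;			/* e.g. c1 == 0xffff00ff gives pos 8, len 8 */
}
#endif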
7693
7694/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7695   if so.  */
7696
7697static rtx
7698apply_distributive_law (x)
7699     rtx x;
7700{
7701  enum rtx_code code = GET_CODE (x);
7702  rtx lhs, rhs, other;
7703  rtx tem;
7704  enum rtx_code inner_code;
7705
7706  /* Distributivity is not true for floating point.
7707     It can change the value.  So don't do it.
7708     -- rms and moshier@world.std.com.  */
7709  if (FLOAT_MODE_P (GET_MODE (x)))
7710    return x;
7711
7712  /* The outer operation can only be one of the following:  */
7713  if (code != IOR && code != AND && code != XOR
7714      && code != PLUS && code != MINUS)
7715    return x;
7716
7717  lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7718
7719  /* If either operand is a primitive we can't do anything, so get out
7720     fast.  */
7721  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7722      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7723    return x;
7724
7725  lhs = expand_compound_operation (lhs);
7726  rhs = expand_compound_operation (rhs);
7727  inner_code = GET_CODE (lhs);
7728  if (inner_code != GET_CODE (rhs))
7729    return x;
7730
7731  /* See if the inner and outer operations distribute.  */
7732  switch (inner_code)
7733    {
7734    case LSHIFTRT:
7735    case ASHIFTRT:
7736    case AND:
7737    case IOR:
7738      /* These all distribute except over PLUS and MINUS.  */
7739      if (code == PLUS || code == MINUS)
7740	return x;
7741      break;
7742
7743    case MULT:
7744      if (code != PLUS && code != MINUS)
7745	return x;
7746      break;
7747
7748    case ASHIFT:
7749      /* This is also a multiply, so it distributes over everything.  */
7750      break;
7751
7752    case SUBREG:
7753      /* Non-paradoxical SUBREGs distribute over all operations, provided
7754	 the inner modes and byte offsets are the same, this is an extraction
7755	 of a low-order part, we don't convert an fp operation to int or
7756	 vice versa, and we would not be converting a single-word
7757	 operation into a multi-word operation.  The latter test is not
7758	 required, but it prevents generating unneeded multi-word operations.
7759	 Some of the previous tests are redundant given the latter test, but
7760	 are retained because they are required for correctness.
7761
7762	 We produce the result slightly differently in this case.  */
7763
7764      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7765	  || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
7766	  || ! subreg_lowpart_p (lhs)
7767	  || (GET_MODE_CLASS (GET_MODE (lhs))
7768	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7769	  || (GET_MODE_SIZE (GET_MODE (lhs))
7770	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7771	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7772	return x;
7773
7774      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7775			SUBREG_REG (lhs), SUBREG_REG (rhs));
7776      return gen_lowpart_for_combine (GET_MODE (x), tem);
7777
7778    default:
7779      return x;
7780    }
7781
7782  /* Set LHS and RHS to the inner operands (A and B in the example
7783     above) and set OTHER to the common operand (C in the example).
7784     There is only one way to do this unless the inner operation is
7785     commutative.  */
7786  if (GET_RTX_CLASS (inner_code) == 'c'
7787      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7788    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7789  else if (GET_RTX_CLASS (inner_code) == 'c'
7790	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7791    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7792  else if (GET_RTX_CLASS (inner_code) == 'c'
7793	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7794    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7795  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7796    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7797  else
7798    return x;
7799
7800  /* Form the new inner operation, seeing if it simplifies first.  */
7801  tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7802
7803  /* There is one exception to the general way of distributing:
7804     (a | b) ^ (a | c) -> (~a) & (b ^ c)  */
7805  if (code == XOR && inner_code == IOR)
7806    {
7807      inner_code = AND;
7808      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
7809    }
7810
7811  /* We may be able to continue distributing the result, so call
7812     ourselves recursively on the inner operation before forming the
7813     outer operation, which we return.  */
7814  return gen_binary (inner_code, GET_MODE (x),
7815		     apply_distributive_law (tem), other);
7816}
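
#if 0
/* Illustrative check, not part of this pass: the two identities relied on
   above, ordinary distribution such as (a * c) + (b * c) == (a + b) * c,
   and the XOR/IOR exception.  Unsigned host arithmetic, so these hold
   bit-for-bit; the pass refuses the rewrite for floating point.  */
#include <assert.h>
#include <stdint.h>

static void
check_distributive_identities (uint32_t a, uint32_t b, uint32_t c)
{
  assert (a * c + b * c == (a + b) * c);	  /* MULT over PLUS */
  assert (((a & c) | (b & c)) == ((a | b) & c));  /* AND over IOR */
  assert (((a | b) ^ (a | c)) == (~a & (b ^ c))); /* the exception above */
}
#endif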
7817
7818/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7819   in MODE.
7820
7821   Return an equivalent form, if different from X.  Otherwise, return X.  If
7822   X is zero, we are to always construct the equivalent form.  */
7823
7824static rtx
7825simplify_and_const_int (x, mode, varop, constop)
7826     rtx x;
7827     enum machine_mode mode;
7828     rtx varop;
7829     unsigned HOST_WIDE_INT constop;
7830{
7831  unsigned HOST_WIDE_INT nonzero;
7832  int i;
7833
7834  /* Simplify VAROP knowing that we will be only looking at some of the
7835     bits in it.
7836
7837     Note by passing in CONSTOP, we guarantee that the bits not set in
7838     CONSTOP are not significant and will never be examined.  We must
7839     ensure that is the case by explicitly masking out those bits
7840     before returning.  */
7841  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7842
7843  /* If VAROP is a CLOBBER, we will fail so return it.  */
7844  if (GET_CODE (varop) == CLOBBER)
7845    return varop;
7846
7847  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
7848     to VAROP and return the new constant.  */
7849  if (GET_CODE (varop) == CONST_INT)
7850    return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
7851
7852  /* See what bits may be nonzero in VAROP.  Unlike the general case of
7853     a call to nonzero_bits, here we don't care about bits outside
7854     MODE.  */
7855
7856  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7857
7858  /* Turn off all bits in the constant that are known to already be zero.
7859     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7860     which is tested below.  */
7861
7862  constop &= nonzero;
7863
7864  /* If we don't have any bits left, return zero.  */
7865  if (constop == 0)
7866    return const0_rtx;
7867
7868  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7869     a power of two, we can replace this with an ASHIFT.  */
7870  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7871      && (i = exact_log2 (constop)) >= 0)
7872    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7873
7874  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7875     or XOR, then try to apply the distributive law.  This may eliminate
7876     operations if either branch can be simplified because of the AND.
7877     It may also make some cases more complex, but those cases probably
7878     won't match a pattern either with or without this.  */
7879
7880  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7881    return
7882      gen_lowpart_for_combine
7883	(mode,
7884	 apply_distributive_law
7885	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7886		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7887					      XEXP (varop, 0), constop),
7888		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7889					      XEXP (varop, 1), constop))));
7890
7891  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
7892     the AND and see if one of the operands simplifies to zero.  If so, we
7893     may eliminate it.  */
7894
7895  if (GET_CODE (varop) == PLUS
7896      && exact_log2 (constop + 1) >= 0)
7897    {
7898      rtx o0, o1;
7899
7900      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
7901      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
7902      if (o0 == const0_rtx)
7903	return o1;
7904      if (o1 == const0_rtx)
7905	return o0;
7906    }
7907
7908  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
7909     if we already had one (just check for the simplest cases).  */
7910  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7911      && GET_MODE (XEXP (x, 0)) == mode
7912      && SUBREG_REG (XEXP (x, 0)) == varop)
7913    varop = XEXP (x, 0);
7914  else
7915    varop = gen_lowpart_for_combine (mode, varop);
7916
7917  /* If we can't make the SUBREG, try to return what we were given.  */
7918  if (GET_CODE (varop) == CLOBBER)
7919    return x ? x : varop;
7920
7921  /* If we are only masking insignificant bits, return VAROP.  */
7922  if (constop == nonzero)
7923    x = varop;
7924  else
7925    {
7926      /* Otherwise, return an AND.  */
7927      constop = trunc_int_for_mode (constop, mode);
7928      /* See how much, if any, of X we can use.  */
7929      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7930	x = gen_binary (AND, mode, varop, GEN_INT (constop));
7931
7932      else
7933	{
7934	  if (GET_CODE (XEXP (x, 1)) != CONST_INT
7935	      || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7936	    SUBST (XEXP (x, 1), GEN_INT (constop));
7937
7938	  SUBST (XEXP (x, 0), varop);
7939	}
7940    }
7941
7942  return x;
7943}
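
#if 0
/* Minimal host-side model, illustrative only, of the heart of
   simplify_and_const_int: once the possibly-nonzero bits of VAROP are
   known, bits of CONSTOP outside them can be turned off, and the AND
   itself dropped when the pruned constant masks nothing.  */
#include <stdint.h>

static uint32_t
prune_and_mask (uint32_t constop, uint32_t nonzero, int *and_needed)
{
  constop &= nonzero;			/* known-zero bits need no masking */
  *and_needed = (constop != nonzero);	/* otherwise the AND is a no-op */
  return constop;
}
#endif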
7944
7945/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7946   We don't let nonzero_bits recur into num_sign_bit_copies, because that
7947   is less useful.  We can't allow both, because that results in exponential
7948   run time recursion.  There is a nullstone testcase that triggered
7949   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
7950#define num_sign_bit_copies()
7951
7952/* Given an expression, X, compute which bits in X can be non-zero.
7953   We don't care about bits outside of those defined in MODE.
7954
7955   For most X this is simply GET_MODE_MASK (MODE), but if X is
7956   a shift, AND, or zero_extract, we can do better.  */
7957
7958static unsigned HOST_WIDE_INT
7959nonzero_bits (x, mode)
7960     rtx x;
7961     enum machine_mode mode;
7962{
7963  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7964  unsigned HOST_WIDE_INT inner_nz;
7965  enum rtx_code code;
7966  unsigned int mode_width = GET_MODE_BITSIZE (mode);
7967  rtx tem;
7968
7969  /* For floating-point values, assume all bits are needed.  */
7970  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7971    return nonzero;
7972
7973  /* If X is wider than MODE, use its mode instead.  */
7974  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7975    {
7976      mode = GET_MODE (x);
7977      nonzero = GET_MODE_MASK (mode);
7978      mode_width = GET_MODE_BITSIZE (mode);
7979    }
7980
7981  if (mode_width > HOST_BITS_PER_WIDE_INT)
7982    /* Our only callers in this case look for single bit values.  So
7983       just return the mode mask.  Those tests will then be false.  */
7984    return nonzero;
7985
7986#ifndef WORD_REGISTER_OPERATIONS
7987  /* If MODE is wider than X, but both are a single word for both the host
7988     and target machines, we can compute this from which bits of the
7989     object might be nonzero in its own mode, taking into account the fact
7990     that on many CISC machines, accessing an object in a wider mode
7991     causes the high-order bits to become undefined.  So they are
7992     not known to be zero.  */
7993
7994  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7995      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7996      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7997      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7998    {
7999      nonzero &= nonzero_bits (x, GET_MODE (x));
8000      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
8001      return nonzero;
8002    }
8003#endif
8004
8005  code = GET_CODE (x);
8006  switch (code)
8007    {
8008    case REG:
8009#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8010      /* If pointers extend unsigned and this is a pointer in Pmode, say that
8011	 all the bits above ptr_mode are known to be zero.  */
8012      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8013	  && REG_POINTER (x))
8014	nonzero &= GET_MODE_MASK (ptr_mode);
8015#endif
8016
8017      /* Include declared information about alignment of pointers.  */
8018      /* ??? We don't properly preserve REG_POINTER changes across
8019	 pointer-to-integer casts, so we can't trust it except for
8020	 things that we know must be pointers.  See execute/960116-1.c.  */
8021      if ((x == stack_pointer_rtx
8022	   || x == frame_pointer_rtx
8023	   || x == arg_pointer_rtx)
8024	  && REGNO_POINTER_ALIGN (REGNO (x)))
8025	{
8026	  unsigned HOST_WIDE_INT alignment
8027	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
8028
8029#ifdef PUSH_ROUNDING
8030	  /* If PUSH_ROUNDING is defined, it is possible for the
8031	     stack to be momentarily aligned only to that amount,
8032	     so we pick the least alignment.  */
8033	  if (x == stack_pointer_rtx && PUSH_ARGS)
8034	    alignment = MIN (PUSH_ROUNDING (1), alignment);
8035#endif
8036
8037	  nonzero &= ~(alignment - 1);
8038	}
8039
8040      /* If X is a register whose nonzero bits value is current, use it.
8041	 Otherwise, if X is a register whose value we can find, use that
8042	 value.  Otherwise, use the previously-computed global nonzero bits
8043	 for this register.  */
8044
8045      if (reg_last_set_value[REGNO (x)] != 0
8046	  && (reg_last_set_mode[REGNO (x)] == mode
8047	      || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT
8048		  && GET_MODE_CLASS (mode) == MODE_INT))
8049	  && (reg_last_set_label[REGNO (x)] == label_tick
8050	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8051		  && REG_N_SETS (REGNO (x)) == 1
8052		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8053					REGNO (x))))
8054	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8055	return reg_last_set_nonzero_bits[REGNO (x)] & nonzero;
8056
8057      tem = get_last_value (x);
8058
8059      if (tem)
8060	{
8061#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8062	  /* If X is narrower than MODE and TEM is a non-negative
8063	     constant that would appear negative in the mode of X,
8064	     sign-extend it for use in reg_nonzero_bits because some
8065	     machines (maybe most) will actually do the sign-extension
8066	     and this is the conservative approach.
8067
8068	     ??? For 2.5, try to tighten up the MD files in this regard
8069	     instead of this kludge.  */
8070
8071	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
8072	      && GET_CODE (tem) == CONST_INT
8073	      && INTVAL (tem) > 0
8074	      && 0 != (INTVAL (tem)
8075		       & ((HOST_WIDE_INT) 1
8076			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8077	    tem = GEN_INT (INTVAL (tem)
8078			   | ((HOST_WIDE_INT) (-1)
8079			      << GET_MODE_BITSIZE (GET_MODE (x))));
8080#endif
8081	  return nonzero_bits (tem, mode) & nonzero;
8082	}
8083      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
8084	{
8085	  unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
8086
8087	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8088	    /* We don't know anything about the upper bits.  */
8089	    mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8090	  return nonzero & mask;
8091	}
8092      else
8093	return nonzero;
8094
8095    case CONST_INT:
8096#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8097      /* If X is negative in MODE, sign-extend the value.  */
8098      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
8099	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
8100	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
8101#endif
8102
8103      return INTVAL (x);
8104
8105    case MEM:
8106#ifdef LOAD_EXTEND_OP
8107      /* On many, if not most, RISC machines, reading a byte from memory
8108	 zeros the rest of the register.  Noticing that fact saves a lot
8109	 of extra zero-extends.  */
8110      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
8111	nonzero &= GET_MODE_MASK (GET_MODE (x));
8112#endif
8113      break;
8114
8115    case EQ:  case NE:
8116    case UNEQ:  case LTGT:
8117    case GT:  case GTU:  case UNGT:
8118    case LT:  case LTU:  case UNLT:
8119    case GE:  case GEU:  case UNGE:
8120    case LE:  case LEU:  case UNLE:
8121    case UNORDERED: case ORDERED:
8122
8123      /* If this produces an integer result, we know which bits are set.
8124	 Code here used to clear bits outside the mode of X, but that is
8125	 now done above.  */
8126
8127      if (GET_MODE_CLASS (mode) == MODE_INT
8128	  && mode_width <= HOST_BITS_PER_WIDE_INT)
8129	nonzero = STORE_FLAG_VALUE;
8130      break;
8131
8132    case NEG:
8133#if 0
8134      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8135	 and num_sign_bit_copies.  */
8136      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8137	  == GET_MODE_BITSIZE (GET_MODE (x)))
8138	nonzero = 1;
8139#endif
8140
8141      if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
8142	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
8143      break;
8144
8145    case ABS:
8146#if 0
8147      /* Disabled to avoid exponential mutual recursion between nonzero_bits
8148	 and num_sign_bit_copies.  */
8149      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
8150	  == GET_MODE_BITSIZE (GET_MODE (x)))
8151	nonzero = 1;
8152#endif
8153      break;
8154
8155    case TRUNCATE:
8156      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
8157      break;
8158
8159    case ZERO_EXTEND:
8160      nonzero &= nonzero_bits (XEXP (x, 0), mode);
8161      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8162	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8163      break;
8164
8165    case SIGN_EXTEND:
8166      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
8167	 Otherwise, show all the bits in the outer mode but not the inner
8168	 may be non-zero.  */
8169      inner_nz = nonzero_bits (XEXP (x, 0), mode);
8170      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
8171	{
8172	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
8173	  if (inner_nz
8174	      & (((HOST_WIDE_INT) 1
8175		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
8176	    inner_nz |= (GET_MODE_MASK (mode)
8177			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
8178	}
8179
8180      nonzero &= inner_nz;
8181      break;
8182
8183    case AND:
8184      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
8185		  & nonzero_bits (XEXP (x, 1), mode));
8186      break;
8187
8188    case XOR:   case IOR:
8189    case UMIN:  case UMAX:  case SMIN:  case SMAX:
8190      {
8191	unsigned HOST_WIDE_INT nonzero0 = nonzero_bits (XEXP (x, 0), mode);
8192
8193	/* Don't call nonzero_bits for the second time if it cannot change
8194	   anything.  */
8195	if ((nonzero & nonzero0) != nonzero)
8196	  nonzero &= (nonzero0 | nonzero_bits (XEXP (x, 1), mode));
8197      }
8198      break;
8199
8200    case PLUS:  case MINUS:
8201    case MULT:
8202    case DIV:   case UDIV:
8203    case MOD:   case UMOD:
8204      /* We can apply the rules of arithmetic to compute the number of
8205	 high- and low-order zero bits of these operations.  We start by
8206	 computing the width (position of the highest-order non-zero bit)
8207	 and the number of low-order zero bits for each value.  */
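      /* For instance (illustrative): if NZ0 == 0x0f (width 4, no low zeros)
	 and NZ1 == 0x06 (width 3, one low-order zero), a PLUS can set at
	 most bits below MAX (4, 3) + 1 == 5, and a MULT at most bits below
	 4 + 3 == 7 with one low-order zero bit.  */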
8208      {
8209	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
8210	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
8211	int width0 = floor_log2 (nz0) + 1;
8212	int width1 = floor_log2 (nz1) + 1;
8213	int low0 = floor_log2 (nz0 & -nz0);
8214	int low1 = floor_log2 (nz1 & -nz1);
8215	HOST_WIDE_INT op0_maybe_minusp
8216	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8217	HOST_WIDE_INT op1_maybe_minusp
8218	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
8219	unsigned int result_width = mode_width;
8220	int result_low = 0;
8221
8222	switch (code)
8223	  {
8224	  case PLUS:
8225	    result_width = MAX (width0, width1) + 1;
8226	    result_low = MIN (low0, low1);
8227	    break;
8228	  case MINUS:
8229	    result_low = MIN (low0, low1);
8230	    break;
8231	  case MULT:
8232	    result_width = width0 + width1;
8233	    result_low = low0 + low1;
8234	    break;
8235	  case DIV:
8236	    if (width1 == 0)
8237	      break;
8238	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8239	      result_width = width0;
8240	    break;
8241	  case UDIV:
8242	    if (width1 == 0)
8243	      break;
8244	    result_width = width0;
8245	    break;
8246	  case MOD:
8247	    if (width1 == 0)
8248	      break;
8249	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
8250	      result_width = MIN (width0, width1);
8251	    result_low = MIN (low0, low1);
8252	    break;
8253	  case UMOD:
8254	    if (width1 == 0)
8255	      break;
8256	    result_width = MIN (width0, width1);
8257	    result_low = MIN (low0, low1);
8258	    break;
8259	  default:
8260	    abort ();
8261	  }
8262
8263	if (result_width < mode_width)
8264	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8265
8266	if (result_low > 0)
8267	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
8268
8269#ifdef POINTERS_EXTEND_UNSIGNED
8270	/* If pointers extend unsigned and this is an addition or subtraction
8271	   to a pointer in Pmode, all the bits above ptr_mode are known to be
8272	   zero.  */
8273	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
8274	    && (code == PLUS || code == MINUS)
8275	    && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8276	  nonzero &= GET_MODE_MASK (ptr_mode);
8277#endif
8278      }
8279      break;
8280
8281    case ZERO_EXTRACT:
8282      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8283	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8284	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8285      break;
8286
8287    case SUBREG:
8288      /* If this is a SUBREG formed for a promoted variable that has
8289	 been zero-extended, we know that at least the high-order bits
8290	 are zero, though others might be too.  */
8291
8292      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
8293	nonzero = (GET_MODE_MASK (GET_MODE (x))
8294		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8295
8296      /* If the inner mode is a single word for both the host and target
8297	 machines, we can compute this from which bits of the inner
8298	 object might be nonzero.  */
8299      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8300	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8301	      <= HOST_BITS_PER_WIDE_INT))
8302	{
8303	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8304
8305#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8306	  /* If this is a typical RISC machine, we only have to worry
8307	     about the way loads are extended.  */
8308	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8309	      ? (((nonzero
8310		   & (((unsigned HOST_WIDE_INT) 1
8311		       << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
8312		  != 0))
8313	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8314#endif
8315	    {
8316	      /* On many CISC machines, accessing an object in a wider mode
8317		 causes the high-order bits to become undefined.  So they are
8318		 not known to be zero.  */
8319	      if (GET_MODE_SIZE (GET_MODE (x))
8320		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8321		nonzero |= (GET_MODE_MASK (GET_MODE (x))
8322			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8323	    }
8324	}
8325      break;
8326
8327    case ASHIFTRT:
8328    case LSHIFTRT:
8329    case ASHIFT:
8330    case ROTATE:
8331      /* The nonzero bits are in two classes: any bits within MODE
8332	 that aren't in GET_MODE (x) are always significant.  The rest of the
8333	 nonzero bits are those that are significant in the operand of
8334	 the shift when shifted the appropriate number of bits.  This
8335	 shows that high-order bits are cleared by the right shift and
8336	 low-order bits by left shifts.  */
8337      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8338	  && INTVAL (XEXP (x, 1)) >= 0
8339	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8340	{
8341	  enum machine_mode inner_mode = GET_MODE (x);
8342	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
8343	  int count = INTVAL (XEXP (x, 1));
8344	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8345	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8346	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8347	  unsigned HOST_WIDE_INT outer = 0;
8348
8349	  if (mode_width > width)
8350	    outer = (op_nonzero & nonzero & ~mode_mask);
8351
8352	  if (code == LSHIFTRT)
8353	    inner >>= count;
8354	  else if (code == ASHIFTRT)
8355	    {
8356	      inner >>= count;
8357
8358	      /* If the sign bit may have been nonzero before the shift, we
8359		 need to mark all the places it could have been copied to
8360		 by the shift as possibly nonzero.  */
8361	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8362		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8363	    }
8364	  else if (code == ASHIFT)
8365	    inner <<= count;
8366	  else
8367	    inner = ((inner << (count % width)
8368		      | (inner >> (width - (count % width)))) & mode_mask);
8369
8370	  nonzero &= (outer | inner);
8371	}
8372      break;
8373
8374    case FFS:
8375      /* This is at most the number of bits in the mode.  */
8376      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8377      break;
8378
8379    case IF_THEN_ELSE:
8380      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8381		  | nonzero_bits (XEXP (x, 2), mode));
8382      break;
8383
8384    default:
8385      break;
8386    }
8387
8388  return nonzero;
8389}
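
#if 0
/* Illustrative model, host arithmetic only, of the shift case above:
   right shifts clear high-order bits of the known-nonzero mask and left
   shifts clear low-order ones, exactly as INNER is adjusted by COUNT.  */
#include <assert.h>
#include <stdint.h>

static void
check_shift_nonzero_model (void)
{
  uint32_t op_nonzero = 0x000000ff;	/* operand known to fit in 8 bits */

  /* (lshiftrt X 4): only bits 0..3 can still be set.  */
  assert ((op_nonzero >> 4) == 0x0000000f);

  /* (ashift X 8): only bits 8..15 can still be set.  */
  assert ((op_nonzero << 8) == 0x0000ff00);
}
#endif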
8390
8391/* See the macro definition above.  */
8392#undef num_sign_bit_copies
8393
8394/* Return the number of bits at the high-order end of X that are known to
8395   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8396   VOIDmode, X will be used in its own mode.  The returned value will always
8397   be between 1 and the number of bits in MODE.  */
8398
8399static unsigned int
8400num_sign_bit_copies (x, mode)
8401     rtx x;
8402     enum machine_mode mode;
8403{
8404  enum rtx_code code = GET_CODE (x);
8405  unsigned int bitwidth;
8406  int num0, num1, result;
8407  unsigned HOST_WIDE_INT nonzero;
8408  rtx tem;
8409
8410  /* If we weren't given a mode, use the mode of X.  If the mode is still
8411     VOIDmode, we don't know anything.  Likewise if one of the modes is
8412     floating-point.  */
8413
8414  if (mode == VOIDmode)
8415    mode = GET_MODE (x);
8416
8417  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8418    return 1;
8419
8420  bitwidth = GET_MODE_BITSIZE (mode);
8421
8422  /* For a smaller object, just ignore the high bits.  */
8423  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8424    {
8425      num0 = num_sign_bit_copies (x, GET_MODE (x));
8426      return MAX (1,
8427		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
8428    }
8429
8430  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8431    {
8432#ifndef WORD_REGISTER_OPERATIONS
8433      /* If this machine does not do all register operations on the entire
8434	 register and MODE is wider than the mode of X, we can say nothing
8435	 at all about the high-order bits.  */
8436      return 1;
8437#else
8438      /* Likewise on machines that do, if the mode of the object is smaller
8439	 than a word and loads of that size don't sign extend, we can say
8440	 nothing about the high order bits.  */
8441      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8442#ifdef LOAD_EXTEND_OP
8443	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8444#endif
8445	  )
8446	return 1;
8447#endif
8448    }
8449
8450  switch (code)
8451    {
8452    case REG:
8453
8454#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
8455      /* If pointers extend signed and this is a pointer in Pmode, say that
8456	 all the bits above ptr_mode are known to be sign bit copies.  */
8457      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8458	  && REG_POINTER (x))
8459	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8460#endif
8461
8462      if (reg_last_set_value[REGNO (x)] != 0
8463	  && reg_last_set_mode[REGNO (x)] == mode
8464	  && (reg_last_set_label[REGNO (x)] == label_tick
8465	      || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8466		  && REG_N_SETS (REGNO (x)) == 1
8467		  && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8468					REGNO (x))))
8469	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8470	return reg_last_set_sign_bit_copies[REGNO (x)];
8471
8472      tem = get_last_value (x);
8473      if (tem != 0)
8474	return num_sign_bit_copies (tem, mode);
8475
8476      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
8477	  && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
8478	return reg_sign_bit_copies[REGNO (x)];
8479      break;
8480
8481    case MEM:
8482#ifdef LOAD_EXTEND_OP
8483      /* Some RISC machines sign-extend all loads smaller than a word.  */
8484      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8485	return MAX (1, ((int) bitwidth
8486			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
8487#endif
8488      break;
8489
8490    case CONST_INT:
8491      /* If the constant is negative, take its 1's complement and remask.
8492	 Then see how many zero bits we have.  */
8493      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8494      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8495	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8496	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8497
8498      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8499
8500    case SUBREG:
8501      /* If this is a SUBREG for a promoted object that is sign-extended
8502	 and we are looking at it in a wider mode, we know that at least the
8503	 high-order bits are known to be sign bit copies.  */
8504
8505      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8506	{
8507	  num0 = num_sign_bit_copies (SUBREG_REG (x), mode);
8508	  return MAX ((int) bitwidth
8509		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8510		      num0);
8511	}
8512
8513      /* For a smaller object, just ignore the high bits.  */
8514      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8515	{
8516	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8517	  return MAX (1, (num0
8518			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8519				   - bitwidth)));
8520	}
8521
8522#ifdef WORD_REGISTER_OPERATIONS
8523#ifdef LOAD_EXTEND_OP
8524      /* For paradoxical SUBREGs on machines where all register operations
8525	 affect the entire register, just look inside.  Note that we are
8526	 passing MODE to the recursive call, so the number of sign bit copies
8527	 will remain relative to that mode, not the inner mode.  */
8528
8529      /* This works only if loads sign extend.  Otherwise, if we get a
8530	 reload for the inner part, it may be loaded from the stack, and
8531	 then we lose all sign bit copies that existed before the store
8532	 to the stack.  */
8533
8534      if ((GET_MODE_SIZE (GET_MODE (x))
8535	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8536	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
8537	return num_sign_bit_copies (SUBREG_REG (x), mode);
8538#endif
8539#endif
8540      break;
8541
8542    case SIGN_EXTRACT:
8543      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8544	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
8545      break;
8546
8547    case SIGN_EXTEND:
8548      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8549	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8550
8551    case TRUNCATE:
8552      /* For a smaller object, just ignore the high bits.  */
8553      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8554      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8555				    - bitwidth)));
8556
8557    case NOT:
8558      return num_sign_bit_copies (XEXP (x, 0), mode);
8559
8560    case ROTATE:       case ROTATERT:
8561      /* If we are rotating left by a number of bits less than the number
8562	 of sign bit copies, we can just subtract that amount from the
8563	 number.  */
8564      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8565	  && INTVAL (XEXP (x, 1)) >= 0
8566	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
8567	{
8568	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8569	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8570				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
8571	}
8572      break;
8573
8574    case NEG:
8575      /* In general, this subtracts one sign bit copy.  But if the value
8576	 is known to be positive, the number of sign bit copies is the
8577	 same as that of the input.  Finally, if the input has just one bit
8578	 that might be nonzero, all the bits are copies of the sign bit.  */
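      /* For instance (illustrative, 8-bit): if X is known to be 0 or 1,
	 then -X is 0 or -1 (0xff), so all 8 bits equal the sign bit and
	 we return the full bitwidth below.  */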
8579      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8580      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8581	return num0 > 1 ? num0 - 1 : 1;
8582
8583      nonzero = nonzero_bits (XEXP (x, 0), mode);
8584      if (nonzero == 1)
8585	return bitwidth;
8586
8587      if (num0 > 1
8588	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8589	num0--;
8590
8591      return num0;
8592
8593    case IOR:   case AND:   case XOR:
8594    case SMIN:  case SMAX:  case UMIN:  case UMAX:
8595      /* Logical operations will preserve the number of sign-bit copies.
8596	 MIN and MAX operations always return one of the operands.  */
8597      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8598      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8599      return MIN (num0, num1);
8600
8601    case PLUS:  case MINUS:
8602      /* For addition and subtraction, we can have a 1-bit carry.  However,
8603	 if we are subtracting 1 from a positive number, there will not
8604	 be such a carry.  Furthermore, if the positive number is known to
8605	 be 0 or 1, we know the result is either -1 or 0.  */
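      /* For instance (illustrative, 8-bit): if X is known to be 0 or 1,
	 X + (-1) is either -1 or 0, so all 8 bits copy the sign bit.  In
	 general the carry can cost one copy, hence MIN (num0, num1) - 1.  */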
8606
8607      if (code == PLUS && XEXP (x, 1) == constm1_rtx
8608	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
8609	{
8610	  nonzero = nonzero_bits (XEXP (x, 0), mode);
8611	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8612	    return (nonzero == 1 || nonzero == 0 ? bitwidth
8613		    : bitwidth - floor_log2 (nonzero) - 1);
8614	}
8615
8616      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8617      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8618      result = MAX (1, MIN (num0, num1) - 1);
8619
8620#ifdef POINTERS_EXTEND_UNSIGNED
8621      /* If pointers extend signed and this is an addition or subtraction
8622	 to a pointer in Pmode, all the bits above ptr_mode are known to be
8623	 sign bit copies.  */
8624      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
8625	  && (code == PLUS || code == MINUS)
8626	  && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
8627	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
8628			     - GET_MODE_BITSIZE (ptr_mode) + 1),
8629		      result);
8630#endif
8631      return result;
8632
8633    case MULT:
8634      /* The number of bits of the product is the sum of the number of
8635	 bits of both terms.  However, unless one of the terms is known
8636	 to be positive, we must allow for an additional bit since negating
8637	 a negative number can remove one sign bit copy.  */
8638
8639      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8640      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8641
8642      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8643      if (result > 0
8644	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8645	      || (((nonzero_bits (XEXP (x, 0), mode)
8646		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8647		  && ((nonzero_bits (XEXP (x, 1), mode)
8648		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8649	result--;
8650
8651      return MAX (1, result);
8652
8653    case UDIV:
8654      /* The result must be <= the first operand.  If the first operand
8655         has the high bit set, we know nothing about the number of sign
8656         bit copies.  */
8657      if (bitwidth > HOST_BITS_PER_WIDE_INT)
8658	return 1;
8659      else if ((nonzero_bits (XEXP (x, 0), mode)
8660		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8661	return 1;
8662      else
8663	return num_sign_bit_copies (XEXP (x, 0), mode);
8664
8665    case UMOD:
8666      /* The result must be <= the second operand.  */
8667      return num_sign_bit_copies (XEXP (x, 1), mode);
8668
8669    case DIV:
8670      /* Similar to unsigned division, except that we have to worry about
8671	 the case where the divisor is negative, in which case we have
8672	 to add 1.  */
8673      result = num_sign_bit_copies (XEXP (x, 0), mode);
8674      if (result > 1
8675	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8676	      || (nonzero_bits (XEXP (x, 1), mode)
8677		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8678	result--;
8679
8680      return result;
8681
8682    case MOD:
8683      result = num_sign_bit_copies (XEXP (x, 1), mode);
8684      if (result > 1
8685	  && (bitwidth > HOST_BITS_PER_WIDE_INT
8686	      || (nonzero_bits (XEXP (x, 1), mode)
8687		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8688	result--;
8689
8690      return result;
8691
8692    case ASHIFTRT:
8693      /* Shifts by a constant add to the number of bits equal to the
8694	 sign bit.  */
8695      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8696      if (GET_CODE (XEXP (x, 1)) == CONST_INT
8697	  && INTVAL (XEXP (x, 1)) > 0)
8698	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
8699
8700      return num0;
8701
8702    case ASHIFT:
8703      /* Left shifts destroy copies.  */
8704      if (GET_CODE (XEXP (x, 1)) != CONST_INT
8705	  || INTVAL (XEXP (x, 1)) < 0
8706	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
8707	return 1;
8708
8709      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8710      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8711
8712    case IF_THEN_ELSE:
8713      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8714      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8715      return MIN (num0, num1);
8716
8717    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
8718    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
8719    case GEU: case GTU: case LEU: case LTU:
8720    case UNORDERED: case ORDERED:
8721      /* If STORE_FLAG_VALUE is negative, take its 1's complement and
8722	 remask.  Then see how many zero bits we have.  */
8723      nonzero = STORE_FLAG_VALUE;
8724      if (bitwidth <= HOST_BITS_PER_WIDE_INT
8725	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8726	nonzero = (~nonzero) & GET_MODE_MASK (mode);
8727
8728      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8729      break;
8730
8731    default:
8732      break;
8733    }
8734
8735  /* If we haven't been able to figure it out by one of the above rules,
8736     see if some of the high-order bits are known to be zero.  If so,
8737     count those bits; that count is the number of sign bit copies.  If we
8738     can't safely compute the mask for this mode, always return 1.  */
8739
8740  if (bitwidth > HOST_BITS_PER_WIDE_INT)
8741    return 1;
8742
8743  nonzero = nonzero_bits (x, mode);
8744  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8745	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8746}
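
#if 0
/* Illustrative host-side analogue, not the function above, of its
   CONST_INT rule: complement a negative value, then count the high-order
   zero bits; each one is a copy of the sign bit.  For example, 0 and -1
   give 32 while 1 and -2 give 31.  */
#include <stdint.h>

static int
const_sign_bit_copies (int32_t v)
{
  uint32_t nonzero = (uint32_t) v;
  uint32_t bit;
  int copies = 0;

  if (v < 0)
    nonzero = ~nonzero;		/* count leading ones as leading zeros */

  for (bit = (uint32_t) 1 << 31; bit != 0 && (nonzero & bit) == 0; bit >>= 1)
    copies++;			/* always at least 1 after the complement */

  return copies;
}
#endif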
8747
8748/* Return the number of "extended" bits there are in X, when interpreted
8749   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8750   unsigned quantities, this is the number of high-order zero bits.
8751   For signed quantities, this is the number of copies of the sign bit
8752   minus 1.  In both cases, this function returns the number of "spare"
8753   bits.  For example, if two quantities for which this function returns
8754   at least 1 are added, the addition is known not to overflow.
8755
8756   This function will always return 0 unless called during combine, which
8757   implies that it must be called from a define_split.  */
8758
8759unsigned int
8760extended_count (x, mode, unsignedp)
8761     rtx x;
8762     enum machine_mode mode;
8763     int unsignedp;
8764{
8765  if (nonzero_sign_valid == 0)
8766    return 0;
8767
8768  return (unsignedp
8769	  ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8770	     ? (GET_MODE_BITSIZE (mode) - 1
8771		- floor_log2 (nonzero_bits (x, mode)))
8772	     : 0)
8773	  : num_sign_bit_copies (x, mode) - 1);
8774}
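
#if 0
/* Illustrative consequence of the comment above, in host arithmetic:
   values with at least one "spare" bit add without overflow.  */
#include <assert.h>
#include <stdint.h>

static void
check_extended_count_claim (void)
{
  /* Unsigned: both fit in 31 bits, leaving one high-order zero bit.  */
  uint32_t a = 0x7fffffffu, b = 0x7fffffffu;

  /* Signed: two sign-bit copies each, i.e. one spare bit.  */
  int32_t c = 0x3fffffff, d = 0x3fffffff;

  assert ((uint64_t) a + b <= 0xffffffffull);
  assert ((int64_t) c + (int64_t) d <= INT32_MAX);
}
#endif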
8775
8776/* This function is called from `simplify_shift_const' to merge two
8777   outer operations.  Specifically, we have already found that we need
8778   to perform operation *POP0 with constant *PCONST0 at the outermost
8779   position.  We would now like to also perform OP1 with constant CONST1
8780   (with *POP0 being done last).
8781
8782   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8783   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8784   complement the innermost operand, otherwise it is unchanged.
8785
8786   MODE is the mode in which the operation will be done.  No bits outside
8787   the width of this mode matter.  It is assumed that the width of this mode
8788   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8789
8790   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8791   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8792   result is simply *PCONST0.
8793
8794   If the resulting operation cannot be expressed as one operation, we
8795   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
8796
8797static int
8798merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8799     enum rtx_code *pop0;
8800     HOST_WIDE_INT *pconst0;
8801     enum rtx_code op1;
8802     HOST_WIDE_INT const1;
8803     enum machine_mode mode;
8804     int *pcomp_p;
8805{
8806  enum rtx_code op0 = *pop0;
8807  HOST_WIDE_INT const0 = *pconst0;
8808
8809  const0 &= GET_MODE_MASK (mode);
8810  const1 &= GET_MODE_MASK (mode);
8811
8812  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8813  if (op0 == AND)
8814    const1 &= const0;
8815
8816  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
8817     if OP0 is SET.  */
8818
8819  if (op1 == NIL || op0 == SET)
8820    return 1;
8821
8822  else if (op0 == NIL)
8823    op0 = op1, const0 = const1;
8824
8825  else if (op0 == op1)
8826    {
8827      switch (op0)
8828	{
8829	case AND:
8830	  const0 &= const1;
8831	  break;
8832	case IOR:
8833	  const0 |= const1;
8834	  break;
8835	case XOR:
8836	  const0 ^= const1;
8837	  break;
8838	case PLUS:
8839	  const0 += const1;
8840	  break;
8841	case NEG:
8842	  op0 = NIL;
8843	  break;
8844	default:
8845	  break;
8846	}
8847    }
8848
8849  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8850  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8851    return 0;
8852
8853  /* If the two constants aren't the same, we can't do anything.  The
8854     remaining six cases can all be done.  */
8855  else if (const0 != const1)
8856    return 0;
8857
8858  else
8859    switch (op0)
8860      {
8861      case IOR:
8862	if (op1 == AND)
8863	  /* (a & b) | b == b */
8864	  op0 = SET;
8865	else /* op1 == XOR */
8866	  /* (a ^ b) | b == a | b */
8867	  {;}
8868	break;
8869
8870      case XOR:
8871	if (op1 == AND)
8872	  /* (a & b) ^ b == (~a) & b */
8873	  op0 = AND, *pcomp_p = 1;
8874	else /* op1 == IOR */
8875	  /* (a | b) ^ b == a & ~b */
8876	  op0 = AND, *pconst0 = ~const0;
8877	break;
8878
8879      case AND:
8880	if (op1 == IOR)
8881	  /* (a | b) & b == b */
8882	  op0 = SET;
8883	else /* op1 == XOR */
8884	  /* (a ^ b) & b == (~a) & b */
8885	  *pcomp_p = 1;
8886	break;
8887      default:
8888	break;
8889      }
8890
8891  /* Check for NO-OP cases.  */
8892  const0 &= GET_MODE_MASK (mode);
8893  if (const0 == 0
8894      && (op0 == IOR || op0 == XOR || op0 == PLUS))
8895    op0 = NIL;
8896  else if (const0 == 0 && op0 == AND)
8897    op0 = SET;
8898  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8899	   && op0 == AND)
8900    op0 = NIL;
8901
8902  /* ??? Slightly redundant with the above mask, but not entirely.
8903     Moving this above means we'd have to sign-extend the mode mask
8904     for the final test.  */
8905  const0 = trunc_int_for_mode (const0, mode);
8906
8907  *pop0 = op0;
8908  *pconst0 = const0;
8909
8910  return 1;
8911}
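
#if 0
/* Illustrative verification, not part of the pass, of the six merge
   identities used in the switch above; unsigned host arithmetic.  */
#include <assert.h>
#include <stdint.h>

static void
check_merge_outer_ops_identities (uint32_t a, uint32_t b)
{
  assert (((a & b) | b) == b);		/* IOR of AND */
  assert (((a ^ b) | b) == (a | b));	/* IOR of XOR */
  assert (((a & b) ^ b) == (~a & b));	/* XOR of AND */
  assert (((a | b) ^ b) == (a & ~b));	/* XOR of IOR */
  assert (((a | b) & b) == b);		/* AND of IOR */
  assert (((a ^ b) & b) == (~a & b));	/* AND of XOR */
}
#endif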
8912
8913/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8914   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
8915   that we started with.
8916
8917   The shift is normally computed in the widest mode we find in VAROP, as
8918   long as it isn't a different number of words than RESULT_MODE.  Exceptions
8919   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
8920
8921static rtx
8922simplify_shift_const (x, code, result_mode, varop, orig_count)
8923     rtx x;
8924     enum rtx_code code;
8925     enum machine_mode result_mode;
8926     rtx varop;
8927     int orig_count;
8928{
8929  enum rtx_code orig_code = code;
8930  unsigned int count;
8931  int signed_count;
8932  enum machine_mode mode = result_mode;
8933  enum machine_mode shift_mode, tmode;
8934  unsigned int mode_words
8935    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8936  /* We form (outer_op (code varop count) (outer_const)).  */
8937  enum rtx_code outer_op = NIL;
8938  HOST_WIDE_INT outer_const = 0;
8939  rtx const_rtx;
8940  int complement_p = 0;
8941  rtx new;
8942
8943  /* Make sure to truncate the "natural" shift on the way in.  We don't
8944     want to do this inside the loop as it makes it more difficult to
8945     combine shifts.  */
8946#ifdef SHIFT_COUNT_TRUNCATED
8947  if (SHIFT_COUNT_TRUNCATED)
8948    orig_count &= GET_MODE_BITSIZE (mode) - 1;
8949#endif
8950
8951  /* If we were given an invalid count, don't do anything except exactly
8952     what was requested.  */
8953
8954  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8955    {
8956      if (x)
8957	return x;
8958
8959      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
8960    }
8961
8962  count = orig_count;
8963
8964  /* Unless one of the branches of the `if' in this loop does a `continue',
8965     we will `break' the loop after the `if'.  */
8966
8967  while (count != 0)
8968    {
8969      /* If we have an operand of (clobber (const_int 0)), just return that
8970	 value.  */
8971      if (GET_CODE (varop) == CLOBBER)
8972	return varop;
8973
8974      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8975	 here would cause an infinite loop.  */
8976      if (complement_p)
8977	break;
8978
8979      /* Convert ROTATERT to ROTATE.  */
8980      if (code == ROTATERT)
8981	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8982
8983      /* We need to determine what mode we will do the shift in.  If the
8984	 shift is a right shift or a ROTATE, we must always do it in the mode
8985	 it was originally done in.  Otherwise, we can do it in MODE, the
8986	 widest mode encountered.  */
8987      shift_mode
8988	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8989	   ? result_mode : mode);
8990
8991      /* Handle cases where the count is greater than the size of the mode
8992	 minus 1.  For ASHIFT, use the size minus one as the count (this can
8993	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8994	 take the count modulo the size.  For other shifts, the result is
8995	 zero.
8996
8997	 Since these shifts are being produced by the compiler by combining
8998	 multiple operations, each of which is defined, we know what the
8999	 result is supposed to be.  */
9000
9001      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
9002	{
9003	  if (code == ASHIFTRT)
9004	    count = GET_MODE_BITSIZE (shift_mode) - 1;
9005	  else if (code == ROTATE || code == ROTATERT)
9006	    count %= GET_MODE_BITSIZE (shift_mode);
9007	  else
9008	    {
9009	      /* We can't simply return zero because there may be an
9010		 outer op.  */
9011	      varop = const0_rtx;
9012	      count = 0;
9013	      break;
9014	    }
9015	}
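      /* For instance (illustrative, SImode): (ashiftrt X 37) is treated as
	 (ashiftrt X 31), (rotate X 37) as (rotate X 5), and (lshiftrt X 37)
	 as the constant zero, modulo any outer operation.  */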
9016
9017      /* An arithmetic right shift of a quantity known to be -1 or 0
9018	 is a no-op.  */
9019      if (code == ASHIFTRT
9020	  && (num_sign_bit_copies (varop, shift_mode)
9021	      == GET_MODE_BITSIZE (shift_mode)))
9022	{
9023	  count = 0;
9024	  break;
9025	}
9026
9027      /* If we are doing an arithmetic right shift and discarding all but
9028	 the sign bit copies, this is equivalent to doing a shift by the
9029	 bitsize minus one.  Convert it into that shift because it will often
9030	 allow other simplifications.  */
9031
9032      if (code == ASHIFTRT
9033	  && (count + num_sign_bit_copies (varop, shift_mode)
9034	      >= GET_MODE_BITSIZE (shift_mode)))
9035	count = GET_MODE_BITSIZE (shift_mode) - 1;
9036
9037      /* We simplify the tests below and elsewhere by converting
9038	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9039	 `make_compound_operation' will convert it to an ASHIFTRT for
9040	 those machines (such as VAX) that don't have an LSHIFTRT.  */
9041      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9042	  && code == ASHIFTRT
9043	  && ((nonzero_bits (varop, shift_mode)
9044	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9045	      == 0))
9046	code = LSHIFTRT;
9047
9048      switch (GET_CODE (varop))
9049	{
9050	case SIGN_EXTEND:
9051	case ZERO_EXTEND:
9052	case SIGN_EXTRACT:
9053	case ZERO_EXTRACT:
9054	  new = expand_compound_operation (varop);
9055	  if (new != varop)
9056	    {
9057	      varop = new;
9058	      continue;
9059	    }
9060	  break;
9061
9062	case MEM:
9063	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9064	     minus the width of a smaller mode, we can do this with a
9065	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9066	  if ((code == ASHIFTRT || code == LSHIFTRT)
9067	      && ! mode_dependent_address_p (XEXP (varop, 0))
9068	      && ! MEM_VOLATILE_P (varop)
9069	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9070					 MODE_INT, 1)) != BLKmode)
9071	    {
9072	      new = adjust_address_nv (varop, tmode,
9073				       BYTES_BIG_ENDIAN ? 0
9074				       : count / BITS_PER_UNIT);
9075
9076	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9077				     : ZERO_EXTEND, mode, new);
9078	      count = 0;
9079	      continue;
9080	    }
9081	  break;
9082
9083	case USE:
9084	  /* Similar to the case above, except that we can only do this if
9085	     the resulting mode is the same as that of the underlying
9086	     MEM and adjust the address depending on the *bits* endianness
9087	     because of the way that bit-field extract insns are defined.  */
9088	  if ((code == ASHIFTRT || code == LSHIFTRT)
9089	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9090					 MODE_INT, 1)) != BLKmode
9091	      && tmode == GET_MODE (XEXP (varop, 0)))
9092	    {
9093	      if (BITS_BIG_ENDIAN)
9094		new = XEXP (varop, 0);
9095	      else
9096		{
9097		  new = copy_rtx (XEXP (varop, 0));
9098		  SUBST (XEXP (new, 0),
9099			 plus_constant (XEXP (new, 0),
9100					count / BITS_PER_UNIT));
9101		}
9102
9103	      varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9104				     : ZERO_EXTEND, mode, new);
9105	      count = 0;
9106	      continue;
9107	    }
9108	  break;
9109
9110	case SUBREG:
9111	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
9112	     the same number of words as what we've seen so far.  Then store
9113	     the widest mode in MODE.  */
9114	  if (subreg_lowpart_p (varop)
9115	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9116		  > GET_MODE_SIZE (GET_MODE (varop)))
9117	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9118		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9119		  == mode_words))
9120	    {
9121	      varop = SUBREG_REG (varop);
9122	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9123		mode = GET_MODE (varop);
9124	      continue;
9125	    }
9126	  break;
9127
9128	case MULT:
9129	  /* Some machines use MULT instead of ASHIFT because MULT
9130	     is cheaper.  But it is still better on those machines to
9131	     merge two shifts into one.  */
9132	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9133	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9134	    {
9135	      varop
9136		= gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
9137			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9138	      continue;
9139	    }
9140	  break;
9141
9142	case UDIV:
9143	  /* Similar, for when divides are cheaper.  */
9144	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9145	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9146	    {
9147	      varop
9148		= gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
9149			      GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
9150	      continue;
9151	    }
9152	  break;
9153
9154	case ASHIFTRT:
9155	  /* If we are extracting just the sign bit of an arithmetic
9156	     right shift, that shift is not needed.  However, the sign
9157	     bit of a wider mode may be different from what would be
9158	     interpreted as the sign bit in a narrower mode, so, if
9159	     the result is narrower, don't discard the shift.  */
9160	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9161	      && (GET_MODE_BITSIZE (result_mode)
9162		  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9163	    {
9164	      varop = XEXP (varop, 0);
9165	      continue;
9166	    }
9167
9168	  /* ... fall through ...  */
9169
9170	case LSHIFTRT:
9171	case ASHIFT:
9172	case ROTATE:
9173	  /* Here we have two nested shifts.  The result is usually the
9174	     AND of a new shift with a mask.  We compute the result below.  */
9175	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9176	      && INTVAL (XEXP (varop, 1)) >= 0
9177	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9178	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9179	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9180	    {
9181	      enum rtx_code first_code = GET_CODE (varop);
9182	      unsigned int first_count = INTVAL (XEXP (varop, 1));
9183	      unsigned HOST_WIDE_INT mask;
9184	      rtx mask_rtx;
9185
9186	      /* We have one common special case.  We can't do any merging if
9187		 the inner code is an ASHIFTRT of a smaller mode.  However, if
9188		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9189		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9190		 we can convert it to
9191		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9192		 This simplifies certain SIGN_EXTEND operations.  */
9193	      if (code == ASHIFT && first_code == ASHIFTRT
9194		  && (GET_MODE_BITSIZE (result_mode)
9195		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
9196		{
9197		  /* C3 has the low-order C1 bits zero.  */
9198
9199		  mask = (GET_MODE_MASK (mode)
9200			  & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9201
9202		  varop = simplify_and_const_int (NULL_RTX, result_mode,
9203						  XEXP (varop, 0), mask);
9204		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9205						varop, count);
9206		  count = first_count;
9207		  code = ASHIFTRT;
9208		  continue;
9209		}
9210
9211	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9212		 than C1 high-order bits equal to the sign bit, we can convert
9213		 this to either an ASHIFT or an ASHIFTRT depending on the
9214		 two counts.
9215
9216		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9217
9218	      if (code == ASHIFTRT && first_code == ASHIFT
9219		  && GET_MODE (varop) == shift_mode
9220		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9221		      > first_count))
9222		{
9223		  varop = XEXP (varop, 0);
9224
9225		  signed_count = count - first_count;
9226		  if (signed_count < 0)
9227		    count = -signed_count, code = ASHIFT;
9228		  else
9229		    count = signed_count;
9230
9231		  continue;
9232		}
9233
9234	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
9235		 we can only do this if FIRST_CODE is also ASHIFTRT.
9236
9237		 We can't do the case when CODE is ROTATE and FIRST_CODE is
9238		 ASHIFTRT.
9239
9240		 If the mode of this shift is not the mode of the outer shift,
9241		 we can't do this if either shift is a right shift or ROTATE.
9242
9243		 Finally, we can't do any of these if the mode is too wide
9244		 unless the codes are the same.
9245
9246		 Handle the case where the shift codes are the same
9247		 first.  */
9248
9249	      if (code == first_code)
9250		{
9251		  if (GET_MODE (varop) != result_mode
9252		      && (code == ASHIFTRT || code == LSHIFTRT
9253			  || code == ROTATE))
9254		    break;
9255
9256		  count += first_count;
9257		  varop = XEXP (varop, 0);
9258		  continue;
9259		}
9260
9261	      if (code == ASHIFTRT
9262		  || (code == ROTATE && first_code == ASHIFTRT)
9263		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9264		  || (GET_MODE (varop) != result_mode
9265		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9266			  || first_code == ROTATE
9267			  || code == ROTATE)))
9268		break;
9269
9270	      /* To compute the mask to apply after the shift, shift the
9271		 nonzero bits of the inner shift the same way the
9272		 outer shift will.  */
9273
9274	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9275
9276	      mask_rtx
9277		= simplify_binary_operation (code, result_mode, mask_rtx,
9278					     GEN_INT (count));
9279
9280	      /* Give up if we can't compute an outer operation to use.  */
9281	      if (mask_rtx == 0
9282		  || GET_CODE (mask_rtx) != CONST_INT
9283		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9284					INTVAL (mask_rtx),
9285					result_mode, &complement_p))
9286		break;
9287
9288	      /* If the shifts are in the same direction, we add the
9289		 counts.  Otherwise, we subtract them.  */
9290	      signed_count = count;
9291	      if ((code == ASHIFTRT || code == LSHIFTRT)
9292		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9293		signed_count += first_count;
9294	      else
9295		signed_count -= first_count;
9296
9297	      /* If COUNT is positive, the new shift is usually CODE,
9298		 except for the two exceptions below, in which case it is
9299		 FIRST_CODE.  If the count is negative, FIRST_CODE should
9300		 always be used.  */
9301	      if (signed_count > 0
9302		  && ((first_code == ROTATE && code == ASHIFT)
9303		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9304		code = first_code, count = signed_count;
9305	      else if (signed_count < 0)
9306		code = first_code, count = -signed_count;
9307	      else
9308		count = signed_count;
9309
9310	      varop = XEXP (varop, 0);
9311	      continue;
9312	    }
9313
9314	  /* If we have (A << B << C) for any shift, we can convert this to
9315	     (A << C << B).  This wins if A is a constant.  Only try this if
9316	     B is not a constant.  */
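	  /* E.g., (ashift:SI (ashift:SI (const_int 1) B) (const_int 3))
	     becomes (ashift:SI (const_int 8) B).  */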
9317
9318	  else if (GET_CODE (varop) == code
9319		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
9320		   && 0 != (new
9321			    = simplify_binary_operation (code, mode,
9322							 XEXP (varop, 0),
9323							 GEN_INT (count))))
9324	    {
9325	      varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
9326	      count = 0;
9327	      continue;
9328	    }
9329	  break;
9330
9331	case NOT:
9332	  /* Make this fit the case below.  */
9333	  varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9334			       GEN_INT (GET_MODE_MASK (mode)));
9335	  continue;
9336
9337	case IOR:
9338	case AND:
9339	case XOR:
9340	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9341	     with C the size of VAROP - 1 and the shift is logical if
9342	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9343	     we have an (le X 0) operation.  If we have an arithmetic shift
9344	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
9345	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
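	  /* E.g., with STORE_FLAG_VALUE == 1,
	     (lshiftrt:SI (ior:SI (plus:SI X (const_int -1)) X)
	     (const_int 31)) becomes (le:SI X (const_int 0)); the sign bit
	     of (X - 1) | X is set exactly when X <= 0.  */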
9346
9347	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9348	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9349	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9350	      && (code == LSHIFTRT || code == ASHIFTRT)
9351	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9352	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9353	    {
9354	      count = 0;
9355	      varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9356				  const0_rtx);
9357
9358	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9359		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9360
9361	      continue;
9362	    }
9363
9364	  /* If we have (shift (logical)), move the logical to the outside
9365	     to allow it to possibly combine with another logical and the
9366	     shift to combine with another shift.  This also canonicalizes to
9367	     what a ZERO_EXTRACT looks like.  Also, some machines have
9368	     (and (shift)) insns.  */
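	  /* E.g., (lshiftrt:SI (and:SI X (const_int 0xff)) (const_int 4))
	     becomes (lshiftrt:SI X (const_int 4)) with an outer AND of
	     (const_int 0xf).  */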
9369
9370	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9371	      && (new = simplify_binary_operation (code, result_mode,
9372						   XEXP (varop, 1),
9373						   GEN_INT (count))) != 0
9374	      && GET_CODE (new) == CONST_INT
9375	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9376				  INTVAL (new), result_mode, &complement_p))
9377	    {
9378	      varop = XEXP (varop, 0);
9379	      continue;
9380	    }
9381
9382	  /* If we can't do that, try to simplify the shift in each arm of the
9383	     logical expression, make a new logical expression, and apply
9384	     the inverse distributive law.  */
9385	  {
9386	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9387					    XEXP (varop, 0), count);
9388	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9389					    XEXP (varop, 1), count);
9390
9391	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9392	    varop = apply_distributive_law (varop);
9393
9394	    count = 0;
9395	  }
9396	  break;
9397
9398	case EQ:
9399	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9400	     says that the sign bit can be tested, FOO has mode MODE, C is
9401	     GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
9402	     may be nonzero.  */
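	  /* E.g., with STORE_FLAG_VALUE == -1 and FOO known to be 0 or 1,
	     (lshiftrt:SI (eq:SI FOO (const_int 0)) (const_int 31))
	     becomes (xor:SI FOO (const_int 1)).  */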
9403	  if (code == LSHIFTRT
9404	      && XEXP (varop, 1) == const0_rtx
9405	      && GET_MODE (XEXP (varop, 0)) == result_mode
9406	      && count == GET_MODE_BITSIZE (result_mode) - 1
9407	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9408	      && ((STORE_FLAG_VALUE
9409		   & ((HOST_WIDE_INT) 1
9410		      << (GET_MODE_BITSIZE (result_mode) - 1))))
9411	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9412	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9413				  (HOST_WIDE_INT) 1, result_mode,
9414				  &complement_p))
9415	    {
9416	      varop = XEXP (varop, 0);
9417	      count = 0;
9418	      continue;
9419	    }
9420	  break;
9421
9422	case NEG:
9423	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9424	     than the number of bits in the mode is equivalent to A.  */
9425	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9426	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9427	    {
9428	      varop = XEXP (varop, 0);
9429	      count = 0;
9430	      continue;
9431	    }
9432
9433	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
9434	     NEG outside to allow shifts to combine.  */
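	  /* E.g., (ashift:SI (neg:SI X) (const_int 3)) is handled as
	     (neg:SI (ashift:SI X (const_int 3))); negation and left
	     shift commute in two's complement arithmetic.  */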
9435	  if (code == ASHIFT
9436	      && merge_outer_ops (&outer_op, &outer_const, NEG,
9437				  (HOST_WIDE_INT) 0, result_mode,
9438				  &complement_p))
9439	    {
9440	      varop = XEXP (varop, 0);
9441	      continue;
9442	    }
9443	  break;
9444
9445	case PLUS:
9446	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9447	     is one less than the number of bits in the mode is
9448	     equivalent to (xor A 1).  */
9449	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9450	      && XEXP (varop, 1) == constm1_rtx
9451	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9452	      && merge_outer_ops (&outer_op, &outer_const, XOR,
9453				  (HOST_WIDE_INT) 1, result_mode,
9454				  &complement_p))
9455	    {
9456	      count = 0;
9457	      varop = XEXP (varop, 0);
9458	      continue;
9459	    }
9460
9461	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9462	     that might be nonzero in BAR are those being shifted out and those
9463	     bits are known zero in FOO, we can replace the PLUS with FOO.
9464	     Similarly in the other operand order.  This code occurs when
9465	     we are computing the size of a variable-size array.  */
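	  /* E.g., if FOO is known to have its low two bits clear and BAR
	     is known to fit in those two bits, (lshiftrt:SI (plus:SI FOO
	     BAR) (const_int 2)) simplifies to (lshiftrt:SI FOO
	     (const_int 2)); BAR is shifted out entirely and cannot
	     generate a carry into the surviving bits.  */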
9466
9467	  if ((code == ASHIFTRT || code == LSHIFTRT)
9468	      && count < HOST_BITS_PER_WIDE_INT
9469	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9470	      && (nonzero_bits (XEXP (varop, 1), result_mode)
9471		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9472	    {
9473	      varop = XEXP (varop, 0);
9474	      continue;
9475	    }
9476	  else if ((code == ASHIFTRT || code == LSHIFTRT)
9477		   && count < HOST_BITS_PER_WIDE_INT
9478		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9479		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9480			    >> count)
9481		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9482			    & nonzero_bits (XEXP (varop, 1),
9483						 result_mode)))
9484	    {
9485	      varop = XEXP (varop, 1);
9486	      continue;
9487	    }
9488
9489	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
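	  /* E.g., (ashift:SI (plus:SI X (const_int 3)) (const_int 2))
	     becomes (ashift:SI X (const_int 2)) with an outer PLUS of
	     (const_int 12), i.e. C' = C << N.  */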
9490	  if (code == ASHIFT
9491	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
9492	      && (new = simplify_binary_operation (ASHIFT, result_mode,
9493						   XEXP (varop, 1),
9494						   GEN_INT (count))) != 0
9495	      && GET_CODE (new) == CONST_INT
9496	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
9497				  INTVAL (new), result_mode, &complement_p))
9498	    {
9499	      varop = XEXP (varop, 0);
9500	      continue;
9501	    }
9502	  break;
9503
9504	case MINUS:
9505	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9506	     with C the size of VAROP - 1 and the shift is logical if
9507	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9508	     we have a (gt X 0) operation.  If the shift is arithmetic with
9509	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9510	     we have a (neg (gt X 0)) operation.  */
9511
9512	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9513	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9514	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9515	      && (code == LSHIFTRT || code == ASHIFTRT)
9516	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9517	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9518	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9519	    {
9520	      count = 0;
9521	      varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9522				  const0_rtx);
9523
9524	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9525		varop = gen_rtx_NEG (GET_MODE (varop), varop);
9526
9527	      continue;
9528	    }
9529	  break;
9530
9531	case TRUNCATE:
9532	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9533	     if the truncate does not affect the value.  */
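	  /* E.g., (lshiftrt:SI (truncate:SI (lshiftrt:DI X (const_int 32)))
	     (const_int 3)) becomes (truncate:SI (lshiftrt:DI X
	     (const_int 35))); every bit the truncation would drop has
	     already been shifted out.  */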
9534	  if (code == LSHIFTRT
9535	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9536	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9537	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
9538		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9539		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9540	    {
9541	      rtx varop_inner = XEXP (varop, 0);
9542
9543	      varop_inner
9544		= gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9545				    XEXP (varop_inner, 0),
9546				    GEN_INT
9547				    (count + INTVAL (XEXP (varop_inner, 1))));
9548	      varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9549	      count = 0;
9550	      continue;
9551	    }
9552	  break;
9553
9554	default:
9555	  break;
9556	}
9557
9558      break;
9559    }
9560
9561  /* We need to determine what mode to do the shift in.  If the shift is
9562     a right shift or ROTATE, we must always do it in the mode it was
9563     originally done in.  Otherwise, we can do it in MODE, the widest mode
9564     encountered.  The code we care about is that of the shift that will
9565     actually be done, not the shift that was originally requested.  */
9566  shift_mode
9567    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9568       ? result_mode : mode);
9569
9570  /* We have now finished analyzing the shift.  The result should be
9571     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9572     OUTER_OP is non-NIL, it is an operation that needs to be applied
9573     to the result of the shift.  OUTER_CONST is the relevant constant,
9574     but we must turn off all bits turned off in the shift.
9575
9576     If we were passed a value for X, see if we can use any pieces of
9577     it.  If not, make new rtx.  */
9578
9579  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9580      && GET_CODE (XEXP (x, 1)) == CONST_INT
9581      && INTVAL (XEXP (x, 1)) == count)
9582    const_rtx = XEXP (x, 1);
9583  else
9584    const_rtx = GEN_INT (count);
9585
9586  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9587      && GET_MODE (XEXP (x, 0)) == shift_mode
9588      && SUBREG_REG (XEXP (x, 0)) == varop)
9589    varop = XEXP (x, 0);
9590  else if (GET_MODE (varop) != shift_mode)
9591    varop = gen_lowpart_for_combine (shift_mode, varop);
9592
9593  /* If we can't make the SUBREG, try to return what we were given.  */
9594  if (GET_CODE (varop) == CLOBBER)
9595    return x ? x : varop;
9596
9597  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9598  if (new != 0)
9599    x = new;
9600  else
9601    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
9602
9603  /* If we have an outer operation and we just made a shift, it is
9604     possible that we could have simplified the shift were it not
9605     for the outer operation.  So try to do the simplification
9606     recursively.  */
9607
9608  if (outer_op != NIL && GET_CODE (x) == code
9609      && GET_CODE (XEXP (x, 1)) == CONST_INT)
9610    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9611			      INTVAL (XEXP (x, 1)));
9612
9613  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9614     turn off all the bits that the shift would have turned off.  */
9615  if (orig_code == LSHIFTRT && result_mode != shift_mode)
9616    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9617				GET_MODE_MASK (result_mode) >> orig_count);
9618
9619  /* Do the remainder of the processing in RESULT_MODE.  */
9620  x = gen_lowpart_for_combine (result_mode, x);
9621
9622  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9623     operation.  */
9624  if (complement_p)
9625    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
9626
9627  if (outer_op != NIL)
9628    {
9629      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9630	outer_const = trunc_int_for_mode (outer_const, result_mode);
9631
9632      if (outer_op == AND)
9633	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9634      else if (outer_op == SET)
9635	/* This means that we have determined that the result is
9636	   equivalent to a constant.  This should be rare.  */
9637	x = GEN_INT (outer_const);
9638      else if (GET_RTX_CLASS (outer_op) == '1')
9639	x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
9640      else
9641	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9642    }
9643
9644  return x;
9645}
9646
9647/* Like recog, but we receive the address of a pointer to a new pattern.
9648   We try to match the rtx that the pointer points to.
9649   If that fails, we may try to modify or replace the pattern,
9650   storing the replacement into the same pointer object.
9651
9652   Modifications include deletion or addition of CLOBBERs.
9653
9654   PNOTES is a pointer to a location where any REG_UNUSED notes added for
9655   the CLOBBERs are placed.
9656
9657   The value is the final insn code from the pattern ultimately matched,
9658   or -1.  */
9659
9660static int
9661recog_for_combine (pnewpat, insn, pnotes)
9662     rtx *pnewpat;
9663     rtx insn;
9664     rtx *pnotes;
9665{
9666  rtx pat = *pnewpat;
9667  int insn_code_number;
9668  int num_clobbers_to_add = 0;
9669  int i;
9670  rtx notes = 0;
9671  rtx dummy_insn;
9672
9673  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9674     we use to indicate that something didn't match.  If we find such a
9675     thing, force rejection.  */
9676  if (GET_CODE (pat) == PARALLEL)
9677    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9678      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9679	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9680	return -1;
9681
9682  /* *pnewpat does not have to be actual PATTERN (insn), so make a dummy
9683     instruction for pattern recognition.  */
9684  dummy_insn = shallow_copy_rtx (insn);
9685  PATTERN (dummy_insn) = pat;
9686  REG_NOTES (dummy_insn) = 0;
9687
9688  insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9689
9690  /* If the pattern isn't recognized, we may previously have had an insn
9691     that clobbered some register as a side effect, but the combined
9692     insn doesn't need to do that.  So try once more without the clobbers
9693     unless this represents an ASM insn.  */
9694
9695  if (insn_code_number < 0 && ! check_asm_operands (pat)
9696      && GET_CODE (pat) == PARALLEL)
9697    {
9698      int pos;
9699
9700      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9701	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9702	  {
9703	    if (i != pos)
9704	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9705	    pos++;
9706	  }
9707
9708      SUBST_INT (XVECLEN (pat, 0), pos);
9709
9710      if (pos == 1)
9711	pat = XVECEXP (pat, 0, 0);
9712
9713      PATTERN (dummy_insn) = pat;
9714      insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add);
9715    }
9716
9717  /* Recognize all noop sets; these will be killed by a followup pass.  */
9718  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
9719    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
9720
9721  /* If we had any clobbers to add, make a new pattern that contains
9722     them.  Then check to make sure that all of them are dead.  */
9723  if (num_clobbers_to_add)
9724    {
9725      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9726				     rtvec_alloc (GET_CODE (pat) == PARALLEL
9727						  ? (XVECLEN (pat, 0)
9728						     + num_clobbers_to_add)
9729						  : num_clobbers_to_add + 1));
9730
9731      if (GET_CODE (pat) == PARALLEL)
9732	for (i = 0; i < XVECLEN (pat, 0); i++)
9733	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9734      else
9735	XVECEXP (newpat, 0, 0) = pat;
9736
9737      add_clobbers (newpat, insn_code_number);
9738
9739      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9740	   i < XVECLEN (newpat, 0); i++)
9741	{
9742	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9743	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9744	    return -1;
9745	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9746				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
9747	}
9748      pat = newpat;
9749    }
9750
9751  *pnewpat = pat;
9752  *pnotes = notes;
9753
9754  return insn_code_number;
9755}
9756
9757/* Like gen_lowpart but for use by combine.  In combine it is not possible
9758   to create any new pseudoregs.  However, it is safe to create
9759   invalid memory addresses, because combine will try to recognize
9760   them and all they will do is make the combine attempt fail.
9761
9762   If for some reason this cannot do its job, an rtx
9763   (clobber (const_int 0)) is returned.
9764   An insn containing that will not be recognized.  */
9765
9766#undef gen_lowpart
9767
9768static rtx
9769gen_lowpart_for_combine (mode, x)
9770     enum machine_mode mode;
9771     rtx x;
9772{
9773  rtx result;
9774
9775  if (GET_MODE (x) == mode)
9776    return x;
9777
9778  /* We can only support MODE being wider than a word if X is a
9779     constant integer or has a mode the same size.  */
9780
9781  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9782      && ! ((GET_MODE (x) == VOIDmode
9783	     && (GET_CODE (x) == CONST_INT
9784		 || GET_CODE (x) == CONST_DOUBLE))
9785	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9786    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9787
9788  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
9789     won't know what to do.  So we will strip off the SUBREG here and
9790     process normally.  */
9791  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9792    {
9793      x = SUBREG_REG (x);
9794      if (GET_MODE (x) == mode)
9795	return x;
9796    }
9797
9798  result = gen_lowpart_common (mode, x);
9799#ifdef CLASS_CANNOT_CHANGE_MODE
9800  if (result != 0
9801      && GET_CODE (result) == SUBREG
9802      && GET_CODE (SUBREG_REG (result)) == REG
9803      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9804      && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (result),
9805				     GET_MODE (SUBREG_REG (result))))
9806    REG_CHANGES_MODE (REGNO (SUBREG_REG (result))) = 1;
9807#endif
9808
9809  if (result)
9810    return result;
9811
9812  if (GET_CODE (x) == MEM)
9813    {
9814      int offset = 0;
9815
9816      /* Refuse to work on a volatile memory ref or one with a mode-dependent
9817	 address.  */
9818      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9819	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9820
9821      /* If we want to refer to something bigger than the original memref,
9822	 generate a perverse subreg instead.  That will force a reload
9823	 of the original memref X.  */
9824      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9825	return gen_rtx_SUBREG (mode, x, 0);
9826
9827      if (WORDS_BIG_ENDIAN)
9828	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9829		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9830
9831      if (BYTES_BIG_ENDIAN)
9832	{
9833	  /* Adjust the address so that the address-after-the-data is
9834	     unchanged.  */
9835	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9836		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9837	}
9838
9839      return adjust_address_nv (x, mode, offset);
9840    }
9841
9842  /* If X is a comparison operator, rewrite it in a new mode.  This
9843     probably won't match, but may allow further simplifications.  */
9844  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9845    return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9846
9847  /* If we couldn't simplify X any other way, just enclose it in a
9848     SUBREG.  Normally, this SUBREG won't match, but some patterns may
9849     include an explicit SUBREG or we may simplify it further in combine.  */
9850  else
9851    {
9852      int offset = 0;
9853      rtx res;
9854
9855      /* We can't handle VOIDmodes.  We can get here when generating vector
9856	 modes since these, unlike integral and floating-point modes, are not
9857	 handled earlier.  */
9858      if (GET_MODE (x) == VOIDmode)
9859	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9860
9861      offset = subreg_lowpart_offset (mode, GET_MODE (x));
9862      res = simplify_gen_subreg (mode, x, GET_MODE (x), offset);
9863      if (res)
9864	return res;
9865      return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9866    }
9867}
9868
9869/* These routines make binary and unary operations by first seeing if they
9870   fold; if not, a new expression is allocated.  */
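/* E.g., gen_binary (PLUS, SImode, const1_rtx, const1_rtx) folds to
   (const_int 2), while a non-constant operand such as a REG yields a
   newly allocated (plus:SI ...) expression.  */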
9871
9872static rtx
9873gen_binary (code, mode, op0, op1)
9874     enum rtx_code code;
9875     enum machine_mode mode;
9876     rtx op0, op1;
9877{
9878  rtx result;
9879  rtx tem;
9880
9881  if (GET_RTX_CLASS (code) == 'c'
9882      && swap_commutative_operands_p (op0, op1))
9883    tem = op0, op0 = op1, op1 = tem;
9884
9885  if (GET_RTX_CLASS (code) == '<')
9886    {
9887      enum machine_mode op_mode = GET_MODE (op0);
9888
9889      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9890	 just (REL_OP X Y).  */
9891      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9892	{
9893	  op1 = XEXP (op0, 1);
9894	  op0 = XEXP (op0, 0);
9895	  op_mode = GET_MODE (op0);
9896	}
9897
9898      if (op_mode == VOIDmode)
9899	op_mode = GET_MODE (op1);
9900      result = simplify_relational_operation (code, op_mode, op0, op1);
9901    }
9902  else
9903    result = simplify_binary_operation (code, mode, op0, op1);
9904
9905  if (result)
9906    return result;
9907
9908  /* Put complex operands first and constants second.  */
9909  if (GET_RTX_CLASS (code) == 'c'
9910      && swap_commutative_operands_p (op0, op1))
9911    return gen_rtx_fmt_ee (code, mode, op1, op0);
9912
9913  /* If we are turning off bits already known off in OP0, we need not do
9914     an AND.  */
9915  else if (code == AND && GET_CODE (op1) == CONST_INT
9916	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9917	   && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
9918    return op0;
9919
9920  return gen_rtx_fmt_ee (code, mode, op0, op1);
9921}
9922
9923/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9924   comparison code that will be tested.
9925
9926   The result is a possibly different comparison code to use.  *POP0 and
9927   *POP1 may be updated.
9928
9929   It is possible that we might detect that a comparison is either always
9930   true or always false.  However, we do not perform general constant
9931   folding in combine, so this knowledge isn't useful.  Such tautologies
9932   should have been detected earlier.  Hence we ignore all such cases.  */
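/* For illustration, a GT test of *POP0 against (const_int -1) is
   canonicalized below into a GE test against (const_int 0).  */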
9933
9934static enum rtx_code
9935simplify_comparison (code, pop0, pop1)
9936     enum rtx_code code;
9937     rtx *pop0;
9938     rtx *pop1;
9939{
9940  rtx op0 = *pop0;
9941  rtx op1 = *pop1;
9942  rtx tem, tem1;
9943  int i;
9944  enum machine_mode mode, tmode;
9945
9946  /* Try a few ways of applying the same transformation to both operands.  */
9947  while (1)
9948    {
9949#ifndef WORD_REGISTER_OPERATIONS
9950      /* The test below this one won't handle SIGN_EXTENDs on these machines,
9951	 so check specially.  */
9952      if (code != GTU && code != GEU && code != LTU && code != LEU
9953	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9954	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
9955	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
9956	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9957	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9958	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9959	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9960	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9961	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
9962	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9963	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9964	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9965	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9966	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9967	  && (INTVAL (XEXP (op0, 1))
9968	      == (GET_MODE_BITSIZE (GET_MODE (op0))
9969		  - (GET_MODE_BITSIZE
9970		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9971	{
9972	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9973	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9974	}
9975#endif
9976
9977      /* If both operands are the same constant shift, see if we can ignore the
9978	 shift.  We can if the shift is a rotate or if the bits shifted out of
9979	 this shift are known to be zero for both inputs and if the type of
9980	 comparison is compatible with the shift.  */
9981      if (GET_CODE (op0) == GET_CODE (op1)
9982	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9983	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9984	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9985		  && (code != GT && code != LT && code != GE && code != LE))
9986	      || (GET_CODE (op0) == ASHIFTRT
9987		  && (code != GTU && code != LTU
9988		      && code != GEU && code != LEU)))
9989	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
9990	  && INTVAL (XEXP (op0, 1)) >= 0
9991	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9992	  && XEXP (op0, 1) == XEXP (op1, 1))
9993	{
9994	  enum machine_mode mode = GET_MODE (op0);
9995	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9996	  int shift_count = INTVAL (XEXP (op0, 1));
9997
9998	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9999	    mask &= (mask >> shift_count) << shift_count;
10000	  else if (GET_CODE (op0) == ASHIFT)
10001	    mask = (mask & (mask << shift_count)) >> shift_count;
10002
10003	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10004	      && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10005	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10006	  else
10007	    break;
10008	}
10009
10010      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10011	 SUBREGs are of the same mode, and, in both cases, the AND would
10012	 be redundant if the comparison was done in the narrower mode,
10013	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
10014	 and the operand's possibly nonzero bits are 0xffffff01; in that case
10015	 if we only care about QImode, we don't need the AND).  This case
10016	 occurs if the output mode of an scc insn is not SImode and
10017	 STORE_FLAG_VALUE == 1 (e.g., the 386).
10018
10019	 Similarly, check for a case where the AND's are ZERO_EXTEND
10020	 operations from some narrower mode even though a SUBREG is not
10021	 present.  */
10022
10023      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10024	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
10025	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
10026	{
10027	  rtx inner_op0 = XEXP (op0, 0);
10028	  rtx inner_op1 = XEXP (op1, 0);
10029	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10030	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10031	  int changed = 0;
10032
10033	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10034	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
10035		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10036	      && (GET_MODE (SUBREG_REG (inner_op0))
10037		  == GET_MODE (SUBREG_REG (inner_op1)))
10038	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10039		  <= HOST_BITS_PER_WIDE_INT)
10040	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10041					     GET_MODE (SUBREG_REG (inner_op0)))))
10042	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10043					     GET_MODE (SUBREG_REG (inner_op1))))))
10044	    {
10045	      op0 = SUBREG_REG (inner_op0);
10046	      op1 = SUBREG_REG (inner_op1);
10047
10048	      /* The resulting comparison is always unsigned since we masked
10049		 off the original sign bit.  */
10050	      code = unsigned_condition (code);
10051
10052	      changed = 1;
10053	    }
10054
10055	  else if (c0 == c1)
10056	    for (tmode = GET_CLASS_NARROWEST_MODE
10057		 (GET_MODE_CLASS (GET_MODE (op0)));
10058		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10059	      if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10060		{
10061		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
10062		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
10063		  code = unsigned_condition (code);
10064		  changed = 1;
10065		  break;
10066		}
10067
10068	  if (! changed)
10069	    break;
10070	}
10071
10072      /* If both operands are NOT, we can strip off the outer operation
10073	 and adjust the comparison code for swapped operands; similarly for
10074	 NEG, except that this must be an equality comparison.  */
10075      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10076	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10077		   && (code == EQ || code == NE)))
10078	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10079
10080      else
10081	break;
10082    }
10083
10084  /* If the first operand is a constant, swap the operands and adjust the
10085     comparison code appropriately, but don't do this if the second operand
10086     is already a constant integer.  */
10087  if (swap_commutative_operands_p (op0, op1))
10088    {
10089      tem = op0, op0 = op1, op1 = tem;
10090      code = swap_condition (code);
10091    }
10092
10093  /* We now enter a loop during which we will try to simplify the comparison.
10094     For the most part, we are only concerned with comparisons with zero,
10095     but some things may really be comparisons with zero and yet not
10096     start out looking that way.  */
10097
10098  while (GET_CODE (op1) == CONST_INT)
10099    {
10100      enum machine_mode mode = GET_MODE (op0);
10101      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10102      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10103      int equality_comparison_p;
10104      int sign_bit_comparison_p;
10105      int unsigned_comparison_p;
10106      HOST_WIDE_INT const_op;
10107
10108      /* We only want to handle integral modes.  This catches VOIDmode,
10109	 CCmode, and the floating-point modes.  An exception is that we
10110	 can handle VOIDmode if OP0 is a COMPARE or a comparison
10111	 operation.  */
10112
10113      if (GET_MODE_CLASS (mode) != MODE_INT
10114	  && ! (mode == VOIDmode
10115		&& (GET_CODE (op0) == COMPARE
10116		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
10117	break;
10118
10119      /* Get the constant we are comparing against and turn off all bits
10120	 not on in our mode.  */
10121      const_op = trunc_int_for_mode (INTVAL (op1), mode);
10122      op1 = GEN_INT (const_op);
10123
10124      /* If we are comparing against a constant power of two and the value
10125	 being compared can only have that single bit nonzero (e.g., it was
10126	 `and'ed with that bit), we can replace this with a comparison
10127	 with zero.  */
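      /* E.g., if OP0 is (and:SI X (const_int 8)), an EQ comparison
	 against (const_int 8) is replaced by an NE comparison against
	 (const_int 0).  */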
10128      if (const_op
10129	  && (code == EQ || code == NE || code == GE || code == GEU
10130	      || code == LT || code == LTU)
10131	  && mode_width <= HOST_BITS_PER_WIDE_INT
10132	  && exact_log2 (const_op) >= 0
10133	  && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10134	{
10135	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10136	  op1 = const0_rtx, const_op = 0;
10137	}
10138
10139      /* Similarly, if we are comparing a value known to be either -1 or
10140	 0 with -1, change it to the opposite comparison against zero.  */
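      /* E.g., if OP0 is known to be either 0 or -1, (eq OP0
	 (const_int -1)) becomes (ne OP0 (const_int 0)).  */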
10141
10142      if (const_op == -1
10143	  && (code == EQ || code == NE || code == GT || code == LE
10144	      || code == GEU || code == LTU)
10145	  && num_sign_bit_copies (op0, mode) == mode_width)
10146	{
10147	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10148	  op1 = const0_rtx, const_op = 0;
10149	}
10150
10151      /* Do some canonicalizations based on the comparison code.  We prefer
10152	 comparisons against zero and then prefer equality comparisons.
10153	 If we can reduce the size of a constant, we will do that too.  */
10154
10155      switch (code)
10156	{
10157	case LT:
10158	  /* < C is equivalent to <= (C - 1) */
10159	  if (const_op > 0)
10160	    {
10161	      const_op -= 1;
10162	      op1 = GEN_INT (const_op);
10163	      code = LE;
10164	      /* ... fall through to LE case below.  */
10165	    }
10166	  else
10167	    break;
10168
10169	case LE:
10170	  /* <= C is equivalent to < (C + 1); we do this for C < 0  */
10171	  if (const_op < 0)
10172	    {
10173	      const_op += 1;
10174	      op1 = GEN_INT (const_op);
10175	      code = LT;
10176	    }
10177
10178	  /* If we are doing a <= 0 comparison on a value known to have
10179	     a zero sign bit, we can replace this with == 0.  */
10180	  else if (const_op == 0
10181		   && mode_width <= HOST_BITS_PER_WIDE_INT
10182		   && (nonzero_bits (op0, mode)
10183		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10184	    code = EQ;
10185	  break;
10186
10187	case GE:
10188	  /* >= C is equivalent to > (C - 1).  */
10189	  if (const_op > 0)
10190	    {
10191	      const_op -= 1;
10192	      op1 = GEN_INT (const_op);
10193	      code = GT;
10194	      /* ... fall through to GT below.  */
10195	    }
10196	  else
10197	    break;
10198
10199	case GT:
10200	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10201	  if (const_op < 0)
10202	    {
10203	      const_op += 1;
10204	      op1 = GEN_INT (const_op);
10205	      code = GE;
10206	    }
10207
10208	  /* If we are doing a > 0 comparison on a value known to have
10209	     a zero sign bit, we can replace this with != 0.  */
10210	  else if (const_op == 0
10211		   && mode_width <= HOST_BITS_PER_WIDE_INT
10212		   && (nonzero_bits (op0, mode)
10213		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10214	    code = NE;
10215	  break;
10216
10217	case LTU:
10218	  /* < C is equivalent to <= (C - 1).  */
10219	  if (const_op > 0)
10220	    {
10221	      const_op -= 1;
10222	      op1 = GEN_INT (const_op);
10223	      code = LEU;
10224	      /* ... fall through ...  */
10225	    }
10226
10227	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10228	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10229		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10230	    {
10231	      const_op = 0, op1 = const0_rtx;
10232	      code = GE;
10233	      break;
10234	    }
10235	  else
10236	    break;
10237
10238	case LEU:
10239	  /* unsigned <= 0 is equivalent to == 0 */
10240	  if (const_op == 0)
10241	    code = EQ;
10242
10243	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10244	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10245		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10246	    {
10247	      const_op = 0, op1 = const0_rtx;
10248	      code = GE;
10249	    }
10250	  break;
10251
10252	case GEU:
10253	  /* >= C is equivalent to > (C - 1).  */
10254	  if (const_op > 1)
10255	    {
10256	      const_op -= 1;
10257	      op1 = GEN_INT (const_op);
10258	      code = GTU;
10259	      /* ... fall through ...  */
10260	    }
10261
10262	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10263	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10264		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10265	    {
10266	      const_op = 0, op1 = const0_rtx;
10267	      code = LT;
10268	      break;
10269	    }
10270	  else
10271	    break;
10272
10273	case GTU:
10274	  /* unsigned > 0 is equivalent to != 0 */
10275	  if (const_op == 0)
10276	    code = NE;
10277
10278	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10279	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10280		    && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10281	    {
10282	      const_op = 0, op1 = const0_rtx;
10283	      code = LT;
10284	    }
10285	  break;
10286
10287	default:
10288	  break;
10289	}
10290
10291      /* Compute some predicates to simplify code below.  */
10292
10293      equality_comparison_p = (code == EQ || code == NE);
10294      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10295      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10296			       || code == GEU);
10297
10298      /* If this is a sign bit comparison and we can do arithmetic in
10299	 MODE, say that we will only be needing the sign bit of OP0.  */
10300      if (sign_bit_comparison_p
10301	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10302	op0 = force_to_mode (op0, mode,
10303			     ((HOST_WIDE_INT) 1
10304			      << (GET_MODE_BITSIZE (mode) - 1)),
10305			     NULL_RTX, 0);
10306
10307      /* Now try cases based on the opcode of OP0.  If none of the cases
10308	 does a "continue", we exit this loop immediately after the
10309	 switch.  */
10310
10311      switch (GET_CODE (op0))
10312	{
10313	case ZERO_EXTRACT:
10314	  /* If we are extracting a single bit from a variable position in
10315	     a constant that has only a single bit set and are comparing it
10316	     with zero, we can convert this into an equality comparison
10317	     between the position and the location of the single bit.  */
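	  /* E.g., (eq (zero_extract:SI (const_int 4) (const_int 1) POS)
	     (const_int 0)) becomes (ne POS (const_int 2)), since bit 2 is
	     the only bit set in the constant; BITS_BIG_ENDIAN targets
	     renumber the position first.  */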
10318
10319	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10320	      && XEXP (op0, 1) == const1_rtx
10321	      && equality_comparison_p && const_op == 0
10322	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10323	    {
10324	      if (BITS_BIG_ENDIAN)
10325		{
10326		  enum machine_mode new_mode
10327		    = mode_for_extraction (EP_extzv, 1);
10328		  if (new_mode == MAX_MACHINE_MODE)
10329		    i = BITS_PER_WORD - 1 - i;
10330		  else
10331		    {
10332		      mode = new_mode;
10333		      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10334		    }
10335		}
10336
10337	      op0 = XEXP (op0, 2);
10338	      op1 = GEN_INT (i);
10339	      const_op = i;
10340
10341	      /* Result is nonzero iff shift count is equal to I.  */
10342	      code = reverse_condition (code);
10343	      continue;
10344	    }
10345
10346	  /* ... fall through ...  */
10347
10348	case SIGN_EXTRACT:
10349	  tem = expand_compound_operation (op0);
10350	  if (tem != op0)
10351	    {
10352	      op0 = tem;
10353	      continue;
10354	    }
10355	  break;
10356
10357	case NOT:
10358	  /* If testing for equality, we can take the NOT of the constant.  */
10359	  if (equality_comparison_p
10360	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10361	    {
10362	      op0 = XEXP (op0, 0);
10363	      op1 = tem;
10364	      continue;
10365	    }
10366
10367	  /* If just looking at the sign bit, reverse the sense of the
10368	     comparison.  */
10369	  if (sign_bit_comparison_p)
10370	    {
10371	      op0 = XEXP (op0, 0);
10372	      code = (code == GE ? LT : GE);
10373	      continue;
10374	    }
10375	  break;
10376
10377	case NEG:
10378	  /* If testing for equality, we can take the NEG of the constant.  */
10379	  if (equality_comparison_p
10380	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10381	    {
10382	      op0 = XEXP (op0, 0);
10383	      op1 = tem;
10384	      continue;
10385	    }
10386
10387	  /* The remaining cases only apply to comparisons with zero.  */
10388	  if (const_op != 0)
10389	    break;
10390
10391	  /* When X is ABS or is known positive,
10392	     (neg X) is < 0 if and only if X != 0.  */
10393
10394	  if (sign_bit_comparison_p
10395	      && (GET_CODE (XEXP (op0, 0)) == ABS
10396		  || (mode_width <= HOST_BITS_PER_WIDE_INT
10397		      && (nonzero_bits (XEXP (op0, 0), mode)
10398			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10399	    {
10400	      op0 = XEXP (op0, 0);
10401	      code = (code == LT ? NE : EQ);
10402	      continue;
10403	    }
10404
10405	  /* If we have NEG of something whose two high-order bits are the
10406	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10407	  if (num_sign_bit_copies (op0, mode) >= 2)
10408	    {
10409	      op0 = XEXP (op0, 0);
10410	      code = swap_condition (code);
10411	      continue;
10412	    }
10413	  break;
10414
10415	case ROTATE:
10416	  /* If we are testing equality and our count is a constant, we
10417	     can perform the inverse operation on our RHS.  */
10418	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10419	      && (tem = simplify_binary_operation (ROTATERT, mode,
10420						   op1, XEXP (op0, 1))) != 0)
10421	    {
10422	      op0 = XEXP (op0, 0);
10423	      op1 = tem;
10424	      continue;
10425	    }
10426
10427	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10428	     a particular bit.  Convert it to an AND of a constant of that
10429	     bit.  This will be converted into a ZERO_EXTRACT.  */
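	  /* E.g., (lt (rotate:SI X (const_int 1)) (const_int 0)) tests
	     bit 30 of X, so OP0 becomes (and:SI X (const_int 0x40000000))
	     and the code becomes NE.  */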
10430	  if (const_op == 0 && sign_bit_comparison_p
10431	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10432	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10433	    {
10434	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10435					    ((HOST_WIDE_INT) 1
10436					     << (mode_width - 1
10437						 - INTVAL (XEXP (op0, 1)))));
10438	      code = (code == LT ? NE : EQ);
10439	      continue;
10440	    }
10441
10442	  /* Fall through.  */
10443
10444	case ABS:
10445	  /* ABS is ignorable inside an equality comparison with zero.  */
10446	  if (const_op == 0 && equality_comparison_p)
10447	    {
10448	      op0 = XEXP (op0, 0);
10449	      continue;
10450	    }
10451	  break;
10452
10453	case SIGN_EXTEND:
10454	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
10455	     to (compare FOO CONST) if CONST fits in FOO's mode and we
10456	     are either testing inequality or have an unsigned comparison
10457	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
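	  /* E.g., a signed comparison of (sign_extend:SI (reg:QI A))
	     against (const_int 100) can be done on the QImode value
	     directly, since 100 fits in QImode.  */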
10458	  if (! unsigned_comparison_p
10459	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10460		  <= HOST_BITS_PER_WIDE_INT)
10461	      && ((unsigned HOST_WIDE_INT) const_op
10462		  < (((unsigned HOST_WIDE_INT) 1
10463		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10464	    {
10465	      op0 = XEXP (op0, 0);
10466	      continue;
10467	    }
10468	  break;
10469
10470	case SUBREG:
10471	  /* Check for the case where we are comparing A - C1 with C2,
10472	     both constants are smaller than 1/2 the maximum positive
10473	     value in MODE, and the comparison is equality or unsigned.
10474	     In that case, if A is either zero-extended to MODE or has
10475	     sufficient sign bits so that the high-order bit in MODE
10476	     is a copy of the sign in the inner mode, we can prove that it is
10477	     safe to do the operation in the wider mode.  This simplifies
10478	     many range checks.  */
10479
10480	  if (mode_width <= HOST_BITS_PER_WIDE_INT
10481	      && subreg_lowpart_p (op0)
10482	      && GET_CODE (SUBREG_REG (op0)) == PLUS
10483	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10484	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10485	      && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
10486		  < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
10487	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10488	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10489				      GET_MODE (SUBREG_REG (op0)))
10490			& ~GET_MODE_MASK (mode))
10491		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10492					   GET_MODE (SUBREG_REG (op0)))
10493		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10494			 - GET_MODE_BITSIZE (mode)))))
10495	    {
10496	      op0 = SUBREG_REG (op0);
10497	      continue;
10498	    }
10499
10500	  /* If the inner mode is narrower and we are extracting the low part,
10501	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10502	  if (subreg_lowpart_p (op0)
10503	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10504	    /* Fall through */ ;
10505	  else
10506	    break;
10507
10508	  /* ... fall through ...  */
10509
10510	case ZERO_EXTEND:
10511	  if ((unsigned_comparison_p || equality_comparison_p)
10512	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10513		  <= HOST_BITS_PER_WIDE_INT)
10514	      && ((unsigned HOST_WIDE_INT) const_op
10515		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10516	    {
10517	      op0 = XEXP (op0, 0);
10518	      continue;
10519	    }
10520	  break;
10521
10522	case PLUS:
10523	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10524	     this for equality comparisons due to pathological cases involving
10525	     overflows.  */
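	  /* E.g., (eq (plus:SI X (const_int 5)) (const_int 7)) becomes
	     (eq X (const_int 2)).  */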
10526	  if (equality_comparison_p
10527	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10528							op1, XEXP (op0, 1))))
10529	    {
10530	      op0 = XEXP (op0, 0);
10531	      op1 = tem;
10532	      continue;
10533	    }
10534
10535	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10536	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10537	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10538	    {
10539	      op0 = XEXP (XEXP (op0, 0), 0);
10540	      code = (code == LT ? EQ : NE);
10541	      continue;
10542	    }
10543	  break;
10544
10545	case MINUS:
10546	  /* We used to optimize signed comparisons against zero, but that
10547	     was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10548	     arrive here as equality comparisons, or (GEU, LTU) are
10549	     optimized away.  No need to special-case them.  */
10550
10551	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
10552	     (eq B (minus A C)), whichever simplifies.  We can only do
10553	     this for equality comparisons due to pathological cases involving
10554	     overflows.  */
10555	  if (equality_comparison_p
10556	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
10557							XEXP (op0, 1), op1)))
10558	    {
10559	      op0 = XEXP (op0, 0);
10560	      op1 = tem;
10561	      continue;
10562	    }
10563
10564	  if (equality_comparison_p
10565	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
10566							XEXP (op0, 0), op1)))
10567	    {
10568	      op0 = XEXP (op0, 1);
10569	      op1 = tem;
10570	      continue;
10571	    }
10572
10573	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10574	     of bits in X minus 1, is one iff X > 0.  */
10575	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10576	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10577	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10578	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10579	    {
10580	      op0 = XEXP (op0, 1);
10581	      code = (code == GE ? LE : GT);
10582	      continue;
10583	    }
10584	  break;
10585
10586	case XOR:
10587	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10588	     if C is zero or B is a constant.  */
10589	  if (equality_comparison_p
10590	      && 0 != (tem = simplify_binary_operation (XOR, mode,
10591							XEXP (op0, 1), op1)))
10592	    {
10593	      op0 = XEXP (op0, 0);
10594	      op1 = tem;
10595	      continue;
10596	    }
10597	  break;
10598
10599	case EQ:  case NE:
10600	case UNEQ:  case LTGT:
10601	case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10602	case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10603        case UNORDERED: case ORDERED:
10604	  /* We can't do anything if OP0 is a condition code value, rather
10605	     than an actual data value.  */
10606	  if (const_op != 0
10607#ifdef HAVE_cc0
10608	      || XEXP (op0, 0) == cc0_rtx
10609#endif
10610	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10611	    break;
10612
10613	  /* Get the two operands being compared.  */
10614	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10615	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10616	  else
10617	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10618
10619	  /* Check for the cases where we simply want the result of the
10620	     earlier test or the opposite of that result.  */
10621	  if (code == NE || code == EQ
10622	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10623		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10624		  && (STORE_FLAG_VALUE
10625		      & (((HOST_WIDE_INT) 1
10626			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10627		  && (code == LT || code == GE)))
10628	    {
10629	      enum rtx_code new_code;
10630	      if (code == LT || code == NE)
10631		new_code = GET_CODE (op0);
10632	      else
10633		new_code = combine_reversed_comparison_code (op0);
10634
10635	      if (new_code != UNKNOWN)
10636		{
10637		  code = new_code;
10638		  op0 = tem;
10639		  op1 = tem1;
10640		  continue;
10641		}
10642	    }
10643	  break;
10644
10645	case IOR:
10646	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10647	     iff X <= 0.  */
10648	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10649	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10650	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10651	    {
10652	      op0 = XEXP (op0, 1);
10653	      code = (code == GE ? GT : LE);
10654	      continue;
10655	    }
10656	  break;
10657
10658	case AND:
10659	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10660	     will be converted to a ZERO_EXTRACT later.  */
10661	  if (const_op == 0 && equality_comparison_p
10662	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10663	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10664	    {
10665	      op0 = simplify_and_const_int
10666		(op0, mode, gen_rtx_LSHIFTRT (mode,
10667					      XEXP (op0, 1),
10668					      XEXP (XEXP (op0, 0), 1)),
10669		 (HOST_WIDE_INT) 1);
10670	      continue;
10671	    }
10672
10673	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10674	     zero and X is a comparison and C1 and C2 describe only bits set
10675	     in STORE_FLAG_VALUE, we can compare with X.  */
10676	  if (const_op == 0 && equality_comparison_p
10677	      && mode_width <= HOST_BITS_PER_WIDE_INT
10678	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10679	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10680	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10681	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10682	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10683	    {
10684	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10685		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10686	      if ((~STORE_FLAG_VALUE & mask) == 0
10687		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10688		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10689			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10690		{
10691		  op0 = XEXP (XEXP (op0, 0), 0);
10692		  continue;
10693		}
10694	    }
10695
10696	  /* If we are doing an equality comparison of an AND of a bit equal
10697	     to the sign bit, replace this with a LT or GE comparison of
10698	     the underlying value.  */
10699	  if (equality_comparison_p
10700	      && const_op == 0
10701	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10702	      && mode_width <= HOST_BITS_PER_WIDE_INT
10703	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10704		  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10705	    {
10706	      op0 = XEXP (op0, 0);
10707	      code = (code == EQ ? GE : LT);
10708	      continue;
10709	    }
10710
10711	  /* If this AND operation is really a ZERO_EXTEND from a narrower
10712	     mode, the constant fits within that mode, and this is either an
10713	     equality or unsigned comparison, try to do this comparison in
10714	     the narrower mode.  */
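	  /* For example, (ltu (and:SI X (const_int 255)) (const_int 16))
	     can become a QImode comparison of the low part of X with 16,
	     since the AND is really a zero extension from QImode and the
	     constant 16 fits in QImode.  */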
10715	  if ((equality_comparison_p || unsigned_comparison_p)
10716	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10717	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10718				   & GET_MODE_MASK (mode))
10719				  + 1)) >= 0
10720	      && const_op >> i == 0
10721	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10722	    {
10723	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10724	      continue;
10725	    }
10726
10727	  /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10728	     in both M1 and M2 and the SUBREG is either paradoxical or
10729	     represents the low part, permute the SUBREG and the AND and
10730	     try again.  */
10731	  if (GET_CODE (XEXP (op0, 0)) == SUBREG
10732	      && (0
10733#ifdef WORD_REGISTER_OPERATIONS
10734		  || ((mode_width
10735		       > (GET_MODE_BITSIZE
10736			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10737		      && mode_width <= BITS_PER_WORD)
10738#endif
10739		  || ((mode_width
10740		       <= (GET_MODE_BITSIZE
10741			   (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10742		      && subreg_lowpart_p (XEXP (op0, 0))))
10743#ifndef WORD_REGISTER_OPERATIONS
10744	      /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10745		 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10746		 As originally written the upper bits have a defined value
10747		 due to the AND operation.  However, if we commute the AND
10748		 inside the SUBREG then they no longer have defined values
10749		 and the meaning of the code has been changed.  */
10750	      && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10751		  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10752#endif
10753	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10754	      && mode_width <= HOST_BITS_PER_WIDE_INT
10755	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10756		  <= HOST_BITS_PER_WIDE_INT)
10757	      && (INTVAL (XEXP (op0, 1)) & ~mask) == 0
10758	      && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10759		       & INTVAL (XEXP (op0, 1)))
10760	      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10761	      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10762		  != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10763
10764	    {
10765	      op0
10766		= gen_lowpart_for_combine
10767		  (mode,
10768		   gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10769			       SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10770	      continue;
10771	    }
10772
10773	  /* Convert (ne (and (lshiftrt (not X) C) 1) 0) to
10774	     (eq (and (lshiftrt X C) 1) 0).  */
10775	  if (const_op == 0 && equality_comparison_p
10776	      && XEXP (op0, 1) == const1_rtx
10777	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10778	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT)
10779	    {
10780	      op0 = simplify_and_const_int
10781		(op0, mode,
10782		 gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0),
10783				   XEXP (XEXP (op0, 0), 1)),
10784		 (HOST_WIDE_INT) 1);
10785	      code = (code == NE ? EQ : NE);
10786	      continue;
10787	    }
10788	  break;
10789
10790	case ASHIFT:
10791	  /* If we have (compare (ashift FOO N) (const_int C)) and
10792	     the high order N bits of FOO (N+1 if an inequality comparison)
10793	     are known to be zero, we can do this by comparing FOO with C
10794	     shifted right N bits so long as the low-order N bits of C are
10795	     zero.  */
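	  /* For example, if the top two bits of X are known to be zero,
	     (eq (ashift:SI X (const_int 2)) (const_int 20)) becomes
	     (eq X (const_int 5)), because 20 has its low two bits clear
	     so nothing is lost by shifting it right.  */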
10796	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10797	      && INTVAL (XEXP (op0, 1)) >= 0
10798	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10799		  < HOST_BITS_PER_WIDE_INT)
10800	      && ((const_op
10801		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10802	      && mode_width <= HOST_BITS_PER_WIDE_INT
10803	      && (nonzero_bits (XEXP (op0, 0), mode)
10804		  & ~(mask >> (INTVAL (XEXP (op0, 1))
10805			       + ! equality_comparison_p))) == 0)
10806	    {
10807	      /* We must perform a logical shift, not an arithmetic one,
10808		 as we want the top N bits of C to be zero.  */
10809	      unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10810
10811	      temp >>= INTVAL (XEXP (op0, 1));
10812	      op1 = GEN_INT (trunc_int_for_mode (temp, mode));
10813	      op0 = XEXP (op0, 0);
10814	      continue;
10815	    }
10816
10817	  /* If we are doing a sign bit comparison, it means we are testing
10818	     a particular bit.  Convert it to the appropriate AND.  */
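	  /* For example, in SImode, (lt (ashift X (const_int 3))
	     (const_int 0)) tests bit 28 of X, so it becomes
	     (ne (and X (const_int 0x10000000)) (const_int 0)).  */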
10819	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10820	      && mode_width <= HOST_BITS_PER_WIDE_INT)
10821	    {
10822	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10823					    ((HOST_WIDE_INT) 1
10824					     << (mode_width - 1
10825						 - INTVAL (XEXP (op0, 1)))));
10826	      code = (code == LT ? NE : EQ);
10827	      continue;
10828	    }
10829
10830	  /* If this is an equality comparison with zero and we are shifting
10831	     the low bit to the sign bit, we can convert this to an AND of the
10832	     low-order bit.  */
10833	  if (const_op == 0 && equality_comparison_p
10834	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10835	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10836	    {
10837	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10838					    (HOST_WIDE_INT) 1);
10839	      continue;
10840	    }
10841	  break;
10842
10843	case ASHIFTRT:
10844	  /* If this is an equality comparison with zero, we can do this
10845	     as a logical shift, which might be much simpler.  */
10846	  if (equality_comparison_p && const_op == 0
10847	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10848	    {
10849	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10850					  XEXP (op0, 0),
10851					  INTVAL (XEXP (op0, 1)));
10852	      continue;
10853	    }
10854
10855	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10856	     do the comparison in a narrower mode.  */
10857	  if (! unsigned_comparison_p
10858	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10859	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
10860	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10861	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10862					 MODE_INT, 1)) != BLKmode
10863	      && (((unsigned HOST_WIDE_INT) const_op
10864		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10865		  <= GET_MODE_MASK (tmode)))
10866	    {
10867	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10868	      continue;
10869	    }
10870
10871	  /* Likewise if OP0 is a PLUS of a sign extension with a
10872	     constant, which is usually represented with the PLUS
10873	     between the shifts.  */
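	  /* For example, in SImode,
	     (ashiftrt (plus (ashift X (const_int 24)) (const_int 0x1000000))
	                (const_int 24))
	     is a sign extension of the low byte of X plus 1, so a signed
	     comparison of it against a constant that fits in QImode can be
	     done in QImode on (plus (lowpart X) (const_int 1)).  */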
10874	  if (! unsigned_comparison_p
10875	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10876	      && GET_CODE (XEXP (op0, 0)) == PLUS
10877	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10878	      && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10879	      && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10880	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10881					 MODE_INT, 1)) != BLKmode
10882	      && (((unsigned HOST_WIDE_INT) const_op
10883		   + (GET_MODE_MASK (tmode) >> 1) + 1)
10884		  <= GET_MODE_MASK (tmode)))
10885	    {
10886	      rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10887	      rtx add_const = XEXP (XEXP (op0, 0), 1);
10888	      rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10889					  XEXP (op0, 1));
10890
10891	      op0 = gen_binary (PLUS, tmode,
10892				gen_lowpart_for_combine (tmode, inner),
10893				new_const);
10894	      continue;
10895	    }
10896
10897	  /* ... fall through ...  */
10898	case LSHIFTRT:
10899	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10900	     the low order N bits of FOO are known to be zero, we can do this
10901	     by comparing FOO with C shifted left N bits so long as no
10902	     overflow occurs.  */
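	  /* For example, if the low two bits of X are known to be zero,
	     (eq (lshiftrt X (const_int 2)) (const_int 5)) becomes
	     (eq X (const_int 20)), since shifting 5 left two bits cannot
	     overflow.  */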
10903	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10904	      && INTVAL (XEXP (op0, 1)) >= 0
10905	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10906	      && mode_width <= HOST_BITS_PER_WIDE_INT
10907	      && (nonzero_bits (XEXP (op0, 0), mode)
10908		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10909	      && (((unsigned HOST_WIDE_INT) const_op
10910		   + (GET_CODE (op0) != LSHIFTRT
10911		      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10912			 + 1)
10913		      : 0))
10914		  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10915	    {
10916	      /* If the shift was logical, then we must make the condition
10917		 unsigned.  */
10918	      if (GET_CODE (op0) == LSHIFTRT)
10919		code = unsigned_condition (code);
10920
10921	      const_op <<= INTVAL (XEXP (op0, 1));
10922	      op1 = GEN_INT (const_op);
10923	      op0 = XEXP (op0, 0);
10924	      continue;
10925	    }
10926
10927	  /* If we are using this shift to extract just the sign bit, we
10928	     can replace this with an LT or GE comparison.  */
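	  /* For example, in SImode, (ne (lshiftrt X (const_int 31))
	     (const_int 0)) extracts just the sign bit of X, so it
	     becomes (lt X (const_int 0)).  */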
10929	  if (const_op == 0
10930	      && (equality_comparison_p || sign_bit_comparison_p)
10931	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
10932	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10933	    {
10934	      op0 = XEXP (op0, 0);
10935	      code = (code == NE || code == GT ? LT : GE);
10936	      continue;
10937	    }
10938	  break;
10939
10940	default:
10941	  break;
10942	}
10943
10944      break;
10945    }
10946
10947  /* Now make any compound operations involved in this comparison.  Then,
10948     check for an outermost SUBREG on OP0 that is not doing anything or is
10949     paradoxical.  The latter transformation must only be performed when
10950     it is known that the "extra" bits will be the same in op0 and op1 or
10951     that they don't matter.  There are three cases to consider:
10952
10953     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
10954     care bits and we can assume they have any convenient value.  So
10955     making the transformation is safe.
10956
10957     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10958     In this case the upper bits of op0 are undefined.  We should not make
10959     the simplification in that case as we do not know the contents of
10960     those bits.
10961
10962     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10963     NIL.  In that case we know those bits are zeros or ones.  We must
10964     also be sure that they are the same as the upper bits of op1.
10965
10966     We can never remove a SUBREG for a non-equality comparison because
10967     the sign bit is in a different place in the underlying object.  */
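  /* For example, (eq (subreg:QI (reg:SI R) 0) (const_int 5)) can become
     (eq (reg:SI R) (const_int 5)) provided the nonzero bits of R and of
     the widened 5 both fit in QImode.  */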
10968
10969  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10970  op1 = make_compound_operation (op1, SET);
10971
10972  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10973      /* Case 3 above, to sometimes allow (subreg (mem x)), isn't
10974	 implemented.  */
10975      && GET_CODE (SUBREG_REG (op0)) == REG
10976      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10977      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10978      && (code == NE || code == EQ))
10979    {
10980      if (GET_MODE_SIZE (GET_MODE (op0))
10981	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10982	{
10983	  op0 = SUBREG_REG (op0);
10984	  op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10985	}
10986      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10987		<= HOST_BITS_PER_WIDE_INT)
10988	       && (nonzero_bits (SUBREG_REG (op0),
10989				 GET_MODE (SUBREG_REG (op0)))
10990		   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10991	{
10992	  tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1);
10993
10994	  if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10995	       & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10996	    op0 = SUBREG_REG (op0), op1 = tem;
10997	}
10998    }
10999
11000  /* We now do the opposite procedure: Some machines don't have compare
11001     insns in all modes.  If OP0's mode is an integer mode smaller than a
11002     word and we can't do a compare in that mode, see if there is a larger
11003     mode for which we can do the compare.  There are a number of cases in
11004     which we can use the wider mode.  */
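  /* For example, on a hypothetical target with SImode compares but no
     HImode compare insn, an HImode equality test can be widened to
     SImode when the nonzero bits of both operands fit in HImode, or,
     for any comparison, when both operands are known to be
     sign-extended.  */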
11005
11006  mode = GET_MODE (op0);
11007  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11008      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11009      && ! have_insn_for (COMPARE, mode))
11010    for (tmode = GET_MODE_WIDER_MODE (mode);
11011	 (tmode != VOIDmode
11012	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11013	 tmode = GET_MODE_WIDER_MODE (tmode))
11014      if (have_insn_for (COMPARE, tmode))
11015	{
11016	  int zero_extended;
11017
11018	  /* If the only nonzero bits in OP0 and OP1 are those in the
11019	     narrower mode and this is an equality or unsigned comparison,
11020	     we can use the wider mode.  Similarly for sign-extended
11021	     values, in which case it is true for all comparisons.  */
11022	  zero_extended = ((code == EQ || code == NE
11023			    || code == GEU || code == GTU
11024			    || code == LEU || code == LTU)
11025			   && (nonzero_bits (op0, tmode)
11026			       & ~GET_MODE_MASK (mode)) == 0
11027			   && ((GET_CODE (op1) == CONST_INT
11028				|| (nonzero_bits (op1, tmode)
11029				    & ~GET_MODE_MASK (mode)) == 0)));
11030
11031	  if (zero_extended
11032	      || ((num_sign_bit_copies (op0, tmode)
11033		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
11034		  && (num_sign_bit_copies (op1, tmode)
11035		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
11036	    {
11037	      /* If OP0 is an AND and we don't have an AND in MODE either,
11038		 make a new AND in the proper mode.  */
11039	      if (GET_CODE (op0) == AND
11040		  && !have_insn_for (AND, mode))
11041		op0 = gen_binary (AND, tmode,
11042				  gen_lowpart_for_combine (tmode,
11043							   XEXP (op0, 0)),
11044				  gen_lowpart_for_combine (tmode,
11045							   XEXP (op0, 1)));
11046
11047	      op0 = gen_lowpart_for_combine (tmode, op0);
11048	      if (zero_extended && GET_CODE (op1) == CONST_INT)
11049		op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
11050	      op1 = gen_lowpart_for_combine (tmode, op1);
11051	      break;
11052	    }
11053
11054	  /* If this is a test for negative, we can make an explicit
11055	     test of the sign bit.  */
11056
11057	  if (op1 == const0_rtx && (code == LT || code == GE)
11058	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11059	    {
11060	      op0 = gen_binary (AND, tmode,
11061				gen_lowpart_for_combine (tmode, op0),
11062				GEN_INT ((HOST_WIDE_INT) 1
11063					 << (GET_MODE_BITSIZE (mode) - 1)));
11064	      code = (code == LT) ? NE : EQ;
11065	      break;
11066	    }
11067	}
11068
11069#ifdef CANONICALIZE_COMPARISON
11070  /* If this machine only supports a subset of valid comparisons, see if we
11071     can convert an unsupported one into a supported one.  */
11072  CANONICALIZE_COMPARISON (code, op0, op1);
11073#endif
11074
11075  *pop0 = op0;
11076  *pop1 = op1;
11077
11078  return code;
11079}
11080
11081/* Like jump.c's reversed_comparison_code, but use the combine
11082   infrastructure for searching backward.  */
11083static enum rtx_code
11084combine_reversed_comparison_code (exp)
11085     rtx exp;
11086{
11087  enum rtx_code code1 = reversed_comparison_code (exp, NULL);
11088  rtx x;
11089
11090  if (code1 != UNKNOWN
11091      || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
11092    return code1;
11093  /* Otherwise try to find where the condition codes were last set and
11094     use that.  */
11095  x = get_last_value (XEXP (exp, 0));
11096  if (!x || GET_CODE (x) != COMPARE)
11097    return UNKNOWN;
11098  return reversed_comparison_code_parts (GET_CODE (exp),
11099					 XEXP (x, 0), XEXP (x, 1), NULL);
11100}
11101/* Return comparison with reversed code of EXP and operands OP0 and OP1.
11102   Return NULL_RTX in case we fail to do the reversal.  */
11103static rtx
11104reversed_comparison (exp, mode, op0, op1)
11105     rtx exp, op0, op1;
11106     enum machine_mode mode;
11107{
11108  enum rtx_code reversed_code = combine_reversed_comparison_code (exp);
11109  if (reversed_code == UNKNOWN)
11110    return NULL_RTX;
11111  else
11112    return gen_binary (reversed_code, mode, op0, op1);
11113}
11114
11115/* Utility function for the following routine.  Called when X is part of a value
11116   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
11117   for each register mentioned.  Similar to mention_regs in cse.c  */
11118
11119static void
11120update_table_tick (x)
11121     rtx x;
11122{
11123  enum rtx_code code = GET_CODE (x);
11124  const char *fmt = GET_RTX_FORMAT (code);
11125  int i;
11126
11127  if (code == REG)
11128    {
11129      unsigned int regno = REGNO (x);
11130      unsigned int endregno
11131	= regno + (regno < FIRST_PSEUDO_REGISTER
11132		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11133      unsigned int r;
11134
11135      for (r = regno; r < endregno; r++)
11136	reg_last_set_table_tick[r] = label_tick;
11137
11138      return;
11139    }
11140
11141  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11142    /* Note that we can't have an "E" in values stored; see
11143       get_last_value_validate.  */
11144    if (fmt[i] == 'e')
11145      update_table_tick (XEXP (x, i));
11146}
11147
11148/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11149   are saying that the register is clobbered and we no longer know its
11150   value.  If INSN is zero, don't update reg_last_set; this is only permitted
11151   with VALUE also zero and is used to invalidate the register.  */
11152
11153static void
11154record_value_for_reg (reg, insn, value)
11155     rtx reg;
11156     rtx insn;
11157     rtx value;
11158{
11159  unsigned int regno = REGNO (reg);
11160  unsigned int endregno
11161    = regno + (regno < FIRST_PSEUDO_REGISTER
11162	       ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
11163  unsigned int i;
11164
11165  /* If VALUE contains REG and we have a previous value for REG, substitute
11166     the previous value.  */
11167  if (value && insn && reg_overlap_mentioned_p (reg, value))
11168    {
11169      rtx tem;
11170
11171      /* Set things up so get_last_value is allowed to see anything set up to
11172	 our insn.  */
11173      subst_low_cuid = INSN_CUID (insn);
11174      tem = get_last_value (reg);
11175
11176      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11177	 it isn't going to be useful and will take a lot of time to process,
11178	 so just use the CLOBBER.  */
11179
11180      if (tem)
11181	{
11182	  if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
11183	       || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
11184	      && GET_CODE (XEXP (tem, 0)) == CLOBBER
11185	      && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11186	    tem = XEXP (tem, 0);
11187
11188	  value = replace_rtx (copy_rtx (value), reg, tem);
11189	}
11190    }
11191
11192  /* For each register modified, show we don't know its value, that
11193     we don't know about its bitwise content, that its value has been
11194     updated, and that we don't know the location of the death of the
11195     register.  */
11196  for (i = regno; i < endregno; i++)
11197    {
11198      if (insn)
11199	reg_last_set[i] = insn;
11200
11201      reg_last_set_value[i] = 0;
11202      reg_last_set_mode[i] = 0;
11203      reg_last_set_nonzero_bits[i] = 0;
11204      reg_last_set_sign_bit_copies[i] = 0;
11205      reg_last_death[i] = 0;
11206    }
11207
11208  /* Mark registers that are being referenced in this value.  */
11209  if (value)
11210    update_table_tick (value);
11211
11212  /* Now update the status of each register being set.
11213     If someone is using this register in this block, set this register
11214     to invalid since we will get confused between the two lives in this
11215     basic block.  This makes using this register always invalid.  In cse, we
11216     scan the table to invalidate all entries using this register, but this
11217     is too much work for us.  */
11218
11219  for (i = regno; i < endregno; i++)
11220    {
11221      reg_last_set_label[i] = label_tick;
11222      if (value && reg_last_set_table_tick[i] == label_tick)
11223	reg_last_set_invalid[i] = 1;
11224      else
11225	reg_last_set_invalid[i] = 0;
11226    }
11227
11228  /* The value being assigned might refer to REG (like in "x++;").  In that
11229     case, we must replace it with (clobber (const_int 0)) to prevent
11230     infinite loops.  */
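  /* For example, for a hypothetical pseudo (reg 65) set by
     (set (reg 65) (plus (reg 65) (const_int 1))) with no usable prior
     value, the self-reference is replaced so that the recorded value
     becomes (plus (clobber (const_int 0)) (const_int 1)).  */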
11231  if (value && ! get_last_value_validate (&value, insn,
11232					  reg_last_set_label[regno], 0))
11233    {
11234      value = copy_rtx (value);
11235      if (! get_last_value_validate (&value, insn,
11236				     reg_last_set_label[regno], 1))
11237	value = 0;
11238    }
11239
11240  /* For the main register being modified, update the value, the mode, the
11241     nonzero bits, and the number of sign bit copies.  */
11242
11243  reg_last_set_value[regno] = value;
11244
11245  if (value)
11246    {
11247      enum machine_mode mode = GET_MODE (reg);
11248      subst_low_cuid = INSN_CUID (insn);
11249      reg_last_set_mode[regno] = mode;
11250      if (GET_MODE_CLASS (mode) == MODE_INT
11251	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11252	mode = nonzero_bits_mode;
11253      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode);
11254      reg_last_set_sign_bit_copies[regno]
11255	= num_sign_bit_copies (value, GET_MODE (reg));
11256    }
11257}
11258
11259/* Called via note_stores from record_dead_and_set_regs to handle one
11260   SET or CLOBBER in an insn.  DATA is the instruction in which the
11261   set is occurring.  */
11262
11263static void
11264record_dead_and_set_regs_1 (dest, setter, data)
11265     rtx dest, setter;
11266     void *data;
11267{
11268  rtx record_dead_insn = (rtx) data;
11269
11270  if (GET_CODE (dest) == SUBREG)
11271    dest = SUBREG_REG (dest);
11272
11273  if (GET_CODE (dest) == REG)
11274    {
11275      /* If we are setting the whole register, we know its value.  Otherwise
11276	 show that we don't know the value.  We can handle SUBREG in
11277	 some cases.  */
11278      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11279	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11280      else if (GET_CODE (setter) == SET
11281	       && GET_CODE (SET_DEST (setter)) == SUBREG
11282	       && SUBREG_REG (SET_DEST (setter)) == dest
11283	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11284	       && subreg_lowpart_p (SET_DEST (setter)))
11285	record_value_for_reg (dest, record_dead_insn,
11286			      gen_lowpart_for_combine (GET_MODE (dest),
11287						       SET_SRC (setter)));
11288      else
11289	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11290    }
11291  else if (GET_CODE (dest) == MEM
11292	   /* Ignore pushes, they clobber nothing.  */
11293	   && ! push_operand (dest, GET_MODE (dest)))
11294    mem_last_set = INSN_CUID (record_dead_insn);
11295}
11296
11297/* Update the records of when each REG was most recently set or killed
11298   for the things done by INSN.  This is the last thing done in processing
11299   INSN in the combiner loop.
11300
11301   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11302   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11303   and also the similar information mem_last_set (which insn most recently
11304   modified memory) and last_call_cuid (which insn was the most recent
11305   subroutine call).  */
11306
11307static void
11308record_dead_and_set_regs (insn)
11309     rtx insn;
11310{
11311  rtx link;
11312  unsigned int i;
11313
11314  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11315    {
11316      if (REG_NOTE_KIND (link) == REG_DEAD
11317	  && GET_CODE (XEXP (link, 0)) == REG)
11318	{
11319	  unsigned int regno = REGNO (XEXP (link, 0));
11320	  unsigned int endregno
11321	    = regno + (regno < FIRST_PSEUDO_REGISTER
11322		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11323		       : 1);
11324
11325	  for (i = regno; i < endregno; i++)
11326	    reg_last_death[i] = insn;
11327	}
11328      else if (REG_NOTE_KIND (link) == REG_INC)
11329	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11330    }
11331
11332  if (GET_CODE (insn) == CALL_INSN)
11333    {
11334      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11335	if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11336	  {
11337	    reg_last_set_value[i] = 0;
11338	    reg_last_set_mode[i] = 0;
11339	    reg_last_set_nonzero_bits[i] = 0;
11340	    reg_last_set_sign_bit_copies[i] = 0;
11341	    reg_last_death[i] = 0;
11342	  }
11343
11344      last_call_cuid = mem_last_set = INSN_CUID (insn);
11345
11346      /* Don't bother recording what this insn does.  It might set the
11347	 return value register, but we can't combine into a call
11348	 pattern anyway, so there's no point trying (and it may cause
11349	 a crash, if e.g. we wind up asking for last_set_value of a
11350	 SUBREG of the return value register).  */
11351      return;
11352    }
11353
11354  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11355}
11356
11357/* If a SUBREG has the promoted bit set, it is in fact a property of the
11358   register present in the SUBREG, so for each such SUBREG go back and
11359   adjust nonzero and sign bit information of the registers that are
11360   known to have some zero/sign bits set.
11361
11362   This is needed because when combine blows the SUBREGs away, the
11363   information on zero/sign bits is lost and further combines can be
11364   missed because of that.  */
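/* For example, on a target that promotes QImode values to SImode
   registers, an unsigned-promoted (subreg:QI (reg:SI R) 0) implies the
   upper 24 bits of R are zero, so R's recorded nonzero bits can be
   narrowed to the QImode mask.  */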
11365
11366static void
11367record_promoted_value (insn, subreg)
11368     rtx insn;
11369     rtx subreg;
11370{
11371  rtx links, set;
11372  unsigned int regno = REGNO (SUBREG_REG (subreg));
11373  enum machine_mode mode = GET_MODE (subreg);
11374
11375  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11376    return;
11377
11378  for (links = LOG_LINKS (insn); links;)
11379    {
11380      insn = XEXP (links, 0);
11381      set = single_set (insn);
11382
11383      if (! set || GET_CODE (SET_DEST (set)) != REG
11384	  || REGNO (SET_DEST (set)) != regno
11385	  || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11386	{
11387	  links = XEXP (links, 1);
11388	  continue;
11389	}
11390
11391      if (reg_last_set[regno] == insn)
11392	{
11393	  if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
11394	    reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode);
11395	}
11396
11397      if (GET_CODE (SET_SRC (set)) == REG)
11398	{
11399	  regno = REGNO (SET_SRC (set));
11400	  links = LOG_LINKS (insn);
11401	}
11402      else
11403	break;
11404    }
11405}
11406
11407/* Scan X for promoted SUBREGs.  For each one found,
11408   note what it implies to the registers used in it.  */
11409
11410static void
11411check_promoted_subreg (insn, x)
11412     rtx insn;
11413     rtx x;
11414{
11415  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11416      && GET_CODE (SUBREG_REG (x)) == REG)
11417    record_promoted_value (insn, x);
11418  else
11419    {
11420      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11421      int i, j;
11422
11423      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11424	switch (format[i])
11425	  {
11426	  case 'e':
11427	    check_promoted_subreg (insn, XEXP (x, i));
11428	    break;
11429	  case 'V':
11430	  case 'E':
11431	    if (XVEC (x, i) != 0)
11432	      for (j = 0; j < XVECLEN (x, i); j++)
11433		check_promoted_subreg (insn, XVECEXP (x, i, j));
11434	    break;
11435	  }
11436    }
11437}
11438
11439/* Utility routine for the following function.  Verify that all the registers
11440   mentioned in *LOC are valid when *LOC was part of a value set when
11441   label_tick == TICK.  Return 0 if some are not.
11442
11443   If REPLACE is non-zero, replace the invalid reference with
11444   (clobber (const_int 0)) and return 1.  This replacement is useful because
11445   we often can get useful information about the form of a value (e.g., if
11446   it was produced by a shift that always produces -1 or 0) even though
11447   we don't know exactly what registers it was produced from.  */
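/* For example, a recorded value such as
   (ashiftrt (clobber (const_int 0)) (const_int 31)) still shows that
   in SImode the register must be 0 or -1, even though the operand
   that produced it is no longer known.  */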
11448
11449static int
11450get_last_value_validate (loc, insn, tick, replace)
11451     rtx *loc;
11452     rtx insn;
11453     int tick;
11454     int replace;
11455{
11456  rtx x = *loc;
11457  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11458  int len = GET_RTX_LENGTH (GET_CODE (x));
11459  int i;
11460
11461  if (GET_CODE (x) == REG)
11462    {
11463      unsigned int regno = REGNO (x);
11464      unsigned int endregno
11465	= regno + (regno < FIRST_PSEUDO_REGISTER
11466		   ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11467      unsigned int j;
11468
11469      for (j = regno; j < endregno; j++)
11470	if (reg_last_set_invalid[j]
11471	    /* If this is a pseudo-register that was only set once and not
11472	       live at the beginning of the function, it is always valid.  */
11473	    || (! (regno >= FIRST_PSEUDO_REGISTER
11474		   && REG_N_SETS (regno) == 1
11475		   && (! REGNO_REG_SET_P
11476		       (BASIC_BLOCK (0)->global_live_at_start, regno)))
11477		&& reg_last_set_label[j] > tick))
11478	  {
11479	    if (replace)
11480	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11481	    return replace;
11482	  }
11483
11484      return 1;
11485    }
11486  /* If this is a memory reference, make sure that there were
11487     no stores after it that might have clobbered the value.  We don't
11488     have alias info, so we assume any store invalidates it.  */
11489  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11490	   && INSN_CUID (insn) <= mem_last_set)
11491    {
11492      if (replace)
11493	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11494      return replace;
11495    }
11496
11497  for (i = 0; i < len; i++)
11498    if ((fmt[i] == 'e'
11499	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11500	/* Don't bother with these.  They shouldn't occur anyway.  */
11501	|| fmt[i] == 'E')
11502      return 0;
11503
11504  /* If we haven't found a reason for it to be invalid, it is valid.  */
11505  return 1;
11506}
11507
11508/* Get the last value assigned to X, if known.  Some registers
11509   in the value may be replaced with (clobber (const_int 0)) if their value
11510   is no longer known reliably.  */
11511
11512static rtx
11513get_last_value (x)
11514     rtx x;
11515{
11516  unsigned int regno;
11517  rtx value;
11518
11519  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11520     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11521     we cannot predict what values the "extra" bits might have.  */
11522  if (GET_CODE (x) == SUBREG
11523      && subreg_lowpart_p (x)
11524      && (GET_MODE_SIZE (GET_MODE (x))
11525	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11526      && (value = get_last_value (SUBREG_REG (x))) != 0)
11527    return gen_lowpart_for_combine (GET_MODE (x), value);
11528
11529  if (GET_CODE (x) != REG)
11530    return 0;
11531
11532  regno = REGNO (x);
11533  value = reg_last_set_value[regno];
11534
11535  /* If we don't have a value, or if it isn't for this basic block and
11536     it's either a hard register, set more than once, or live
11537     at the beginning of the function, return 0.
11538
11539     Because if it's not live at the beginning of the function then the reg
11540     is always set before being used (is never used without being set).
11541     And, if it's set only once, and it's always set before use, then all
11542     uses must have the same last value, even if it's not from this basic
11543     block.  */
11544
11545  if (value == 0
11546      || (reg_last_set_label[regno] != label_tick
11547	  && (regno < FIRST_PSEUDO_REGISTER
11548	      || REG_N_SETS (regno) != 1
11549	      || (REGNO_REG_SET_P
11550		  (BASIC_BLOCK (0)->global_live_at_start, regno)))))
11551    return 0;
11552
11553  /* If the value was set in a later insn than the ones we are processing,
11554     we can't use it even if the register was only set once.  */
11555  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11556    return 0;
11557
11558  /* If the value has all its registers valid, return it.  */
11559  if (get_last_value_validate (&value, reg_last_set[regno],
11560			       reg_last_set_label[regno], 0))
11561    return value;
11562
11563  /* Otherwise, make a copy and replace any invalid register with
11564     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11565
11566  value = copy_rtx (value);
11567  if (get_last_value_validate (&value, reg_last_set[regno],
11568			       reg_last_set_label[regno], 1))
11569    return value;
11570
11571  return 0;
11572}
11573
11574/* Return nonzero if expression X refers to a REG or to memory
11575   that is set in an instruction more recent than FROM_CUID.  */
11576
11577static int
11578use_crosses_set_p (x, from_cuid)
11579     rtx x;
11580     int from_cuid;
11581{
11582  const char *fmt;
11583  int i;
11584  enum rtx_code code = GET_CODE (x);
11585
11586  if (code == REG)
11587    {
11588      unsigned int regno = REGNO (x);
11589      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11590				 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11591
11592#ifdef PUSH_ROUNDING
11593      /* Don't allow uses of the stack pointer to be moved,
11594	 because we don't know whether the move crosses a push insn.  */
11595      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11596	return 1;
11597#endif
11598      for (; regno < endreg; regno++)
11599	if (reg_last_set[regno]
11600	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
11601	  return 1;
11602      return 0;
11603    }
11604
11605  if (code == MEM && mem_last_set > from_cuid)
11606    return 1;
11607
11608  fmt = GET_RTX_FORMAT (code);
11609
11610  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11611    {
11612      if (fmt[i] == 'E')
11613	{
11614	  int j;
11615	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11616	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11617	      return 1;
11618	}
11619      else if (fmt[i] == 'e'
11620	       && use_crosses_set_p (XEXP (x, i), from_cuid))
11621	return 1;
11622    }
11623  return 0;
11624}
11625
11626/* Define three variables used for communication between the following
11627   routines.  */
11628
11629static unsigned int reg_dead_regno, reg_dead_endregno;
11630static int reg_dead_flag;
11631
11632/* Function called via note_stores from reg_dead_at_p.
11633
11634   If DEST is within [reg_dead_regno, reg_dead_endregno), set
11635   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11636
11637static void
11638reg_dead_at_p_1 (dest, x, data)
11639     rtx dest;
11640     rtx x;
11641     void *data ATTRIBUTE_UNUSED;
11642{
11643  unsigned int regno, endregno;
11644
11645  if (GET_CODE (dest) != REG)
11646    return;
11647
11648  regno = REGNO (dest);
11649  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11650		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11651
11652  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11653    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11654}
11655
11656/* Return non-zero if REG is known to be dead at INSN.
11657
11658   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11659   referencing REG, it is dead.  If we hit a SET referencing REG, it is
11660   live.  Otherwise, see if it is live or dead at the start of the basic
11661   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11662   must be assumed to be always live.  */
11663
11664static int
11665reg_dead_at_p (reg, insn)
11666     rtx reg;
11667     rtx insn;
11668{
11669  int block;
11670  unsigned int i;
11671
11672  /* Set variables for reg_dead_at_p_1.  */
11673  reg_dead_regno = REGNO (reg);
11674  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11675					? HARD_REGNO_NREGS (reg_dead_regno,
11676							    GET_MODE (reg))
11677					: 1);
11678
11679  reg_dead_flag = 0;
11680
11681  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
11682  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11683    {
11684      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11685	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11686	  return 0;
11687    }
11688
11689  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11690     beginning of function.  */
11691  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11692       insn = prev_nonnote_insn (insn))
11693    {
11694      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11695      if (reg_dead_flag)
11696	return reg_dead_flag == 1 ? 1 : 0;
11697
11698      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11699	return 1;
11700    }
11701
11702  /* Get the basic block number that we were in.  */
11703  if (insn == 0)
11704    block = 0;
11705  else
11706    {
11707      for (block = 0; block < n_basic_blocks; block++)
11708	if (insn == BLOCK_HEAD (block))
11709	  break;
11710
11711      if (block == n_basic_blocks)
11712	return 0;
11713    }
11714
11715  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11716    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11717      return 0;
11718
11719  return 1;
11720}
11721
11722/* Note hard registers in X that are used.  This code is similar to
11723   that in flow.c, but much simpler since we don't care about pseudos.  */
11724
11725static void
11726mark_used_regs_combine (x)
11727     rtx x;
11728{
11729  RTX_CODE code = GET_CODE (x);
11730  unsigned int regno;
11731  int i;
11732
11733  switch (code)
11734    {
11735    case LABEL_REF:
11736    case SYMBOL_REF:
11737    case CONST_INT:
11738    case CONST:
11739    case CONST_DOUBLE:
11740    case CONST_VECTOR:
11741    case PC:
11742    case ADDR_VEC:
11743    case ADDR_DIFF_VEC:
11744    case ASM_INPUT:
11745#ifdef HAVE_cc0
11746    /* CC0 must die in the insn after it is set, so we don't need to take
11747       special note of it here.  */
11748    case CC0:
11749#endif
11750      return;
11751
11752    case CLOBBER:
11753      /* If we are clobbering a MEM, mark any hard registers inside the
11754	 address as used.  */
11755      if (GET_CODE (XEXP (x, 0)) == MEM)
11756	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11757      return;
11758
11759    case REG:
11760      regno = REGNO (x);
11761      /* A hard reg in a wide mode may really be multiple registers.
11762	 If so, mark all of them just like the first.  */
11763      if (regno < FIRST_PSEUDO_REGISTER)
11764	{
11765	  unsigned int endregno, r;
11766
11767	  /* None of this applies to the stack, frame or arg pointers.  */
11768	  if (regno == STACK_POINTER_REGNUM
11769#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11770	      || regno == HARD_FRAME_POINTER_REGNUM
11771#endif
11772#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11773	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11774#endif
11775	      || regno == FRAME_POINTER_REGNUM)
11776	    return;
11777
11778	  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11779	  for (r = regno; r < endregno; r++)
11780	    SET_HARD_REG_BIT (newpat_used_regs, r);
11781	}
11782      return;
11783
11784    case SET:
11785      {
11786	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11787	   the address.  */
11788	rtx testreg = SET_DEST (x);
11789
11790	while (GET_CODE (testreg) == SUBREG
11791	       || GET_CODE (testreg) == ZERO_EXTRACT
11792	       || GET_CODE (testreg) == SIGN_EXTRACT
11793	       || GET_CODE (testreg) == STRICT_LOW_PART)
11794	  testreg = XEXP (testreg, 0);
11795
11796	if (GET_CODE (testreg) == MEM)
11797	  mark_used_regs_combine (XEXP (testreg, 0));
11798
11799	mark_used_regs_combine (SET_SRC (x));
11800      }
11801      return;
11802
11803    default:
11804      break;
11805    }
11806
11807  /* Recursively scan the operands of this expression.  */
11808
11809  {
11810    const char *fmt = GET_RTX_FORMAT (code);
11811
11812    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11813      {
11814	if (fmt[i] == 'e')
11815	  mark_used_regs_combine (XEXP (x, i));
11816	else if (fmt[i] == 'E')
11817	  {
11818	    int j;
11819
11820	    for (j = 0; j < XVECLEN (x, i); j++)
11821	      mark_used_regs_combine (XVECEXP (x, i, j));
11822	  }
11823      }
11824  }
11825}
11826
11827/* Remove register number REGNO from the dead registers list of INSN.
11828
11829   Return the note used to record the death, if there was one.  */
11830
11831rtx
11832remove_death (regno, insn)
11833     unsigned int regno;
11834     rtx insn;
11835{
11836  rtx note = find_regno_note (insn, REG_DEAD, regno);
11837
11838  if (note)
11839    {
11840      REG_N_DEATHS (regno)--;
11841      remove_note (insn, note);
11842    }
11843
11844  return note;
11845}
11846
11847/* For each register (hardware or pseudo) used within expression X, if its
11848   death is in an instruction with cuid between FROM_CUID (inclusive) and
11849   TO_INSN (exclusive), put a REG_DEAD note for that register in the
11850   list headed by PNOTES.
11851
11852   That said, don't move registers killed by maybe_kill_insn.
11853
11854   This is done when X is being merged by combination into TO_INSN.  These
11855   notes will then be distributed as needed.  */
11856
11857static void
11858move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
11859     rtx x;
11860     rtx maybe_kill_insn;
11861     int from_cuid;
11862     rtx to_insn;
11863     rtx *pnotes;
11864{
11865  const char *fmt;
11866  int len, i;
11867  enum rtx_code code = GET_CODE (x);
11868
11869  if (code == REG)
11870    {
11871      unsigned int regno = REGNO (x);
11872      rtx where_dead = reg_last_death[regno];
11873      rtx before_dead, after_dead;
11874
11875      /* Don't move the register if it gets killed in between from and to.  */
11876      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11877	  && ! reg_referenced_p (x, maybe_kill_insn))
11878	return;
11879
11880      /* WHERE_DEAD could be a USE insn made by combine, so first we
11881	 make sure that we have insns with valid INSN_CUID values.  */
11882      before_dead = where_dead;
11883      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11884	before_dead = PREV_INSN (before_dead);
11885
11886      after_dead = where_dead;
11887      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11888	after_dead = NEXT_INSN (after_dead);
11889
11890      if (before_dead && after_dead
11891	  && INSN_CUID (before_dead) >= from_cuid
11892	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11893	      || (where_dead != after_dead
11894		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11895	{
11896	  rtx note = remove_death (regno, where_dead);
11897
11898	  /* It is possible for the call above to return 0.  This can occur
11899	     when reg_last_death points to I2 or I1 that we combined with.
11900	     In that case make a new note.
11901
11902	     We must also check for the case where X is a hard register
11903	     and NOTE is a death note for a range of hard registers
11904	     including X.  In that case, we must put REG_DEAD notes for
11905	     the remaining registers in place of NOTE.  */
11906
11907	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11908	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11909		  > GET_MODE_SIZE (GET_MODE (x))))
11910	    {
11911	      unsigned int deadregno = REGNO (XEXP (note, 0));
11912	      unsigned int deadend
11913		= (deadregno + HARD_REGNO_NREGS (deadregno,
11914						 GET_MODE (XEXP (note, 0))));
11915	      unsigned int ourend
11916		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11917	      unsigned int i;
11918
11919	      for (i = deadregno; i < deadend; i++)
11920		if (i < regno || i >= ourend)
11921		  REG_NOTES (where_dead)
11922		    = gen_rtx_EXPR_LIST (REG_DEAD,
11923					 gen_rtx_REG (reg_raw_mode[i], i),
11924					 REG_NOTES (where_dead));
11925	    }
11926
11927	  /* If we didn't find any note, or if we found a REG_DEAD note that
11928	     covers only part of the given reg, and we have a multi-reg hard
11929	     register, then to be safe we must check for REG_DEAD notes
11930	     for each register other than the first.  They could have
11931	     their own REG_DEAD notes lying around.  */
11932	  else if ((note == 0
11933		    || (note != 0
11934			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11935			    < GET_MODE_SIZE (GET_MODE (x)))))
11936		   && regno < FIRST_PSEUDO_REGISTER
11937		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11938	    {
11939	      unsigned int ourend
11940		= regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11941	      unsigned int i, offset;
11942	      rtx oldnotes = 0;
11943
11944	      if (note)
11945		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11946	      else
11947		offset = 1;
11948
11949	      for (i = regno + offset; i < ourend; i++)
11950		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11951			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11952	    }
11953
11954	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11955	    {
11956	      XEXP (note, 1) = *pnotes;
11957	      *pnotes = note;
11958	    }
11959	  else
11960	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11961
11962	  REG_N_DEATHS (regno)++;
11963	}
11964
11965      return;
11966    }
11967
11968  else if (GET_CODE (x) == SET)
11969    {
11970      rtx dest = SET_DEST (x);
11971
11972      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11973
11974      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11975	 that accesses one word of a multi-word item, some
11976	 piece of every register in the expression is used by
11977	 this insn, so remove any old death.  */
11978      /* ??? So why do we test for equality of the sizes?  */
11979
11980      if (GET_CODE (dest) == ZERO_EXTRACT
11981	  || GET_CODE (dest) == STRICT_LOW_PART
11982	  || (GET_CODE (dest) == SUBREG
11983	      && (((GET_MODE_SIZE (GET_MODE (dest))
11984		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11985		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11986		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11987	{
11988	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11989	  return;
11990	}
11991
11992      /* If this is some other SUBREG, we know it replaces the entire
11993	 value, so use that as the destination.  */
11994      if (GET_CODE (dest) == SUBREG)
11995	dest = SUBREG_REG (dest);
11996
11997      /* If this is a MEM, adjust deaths of anything used in the address.
11998	 For a REG (the only other possibility), the entire value is
11999	 being replaced so the old value is not used in this insn.  */
12000
12001      if (GET_CODE (dest) == MEM)
12002	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
12003		     to_insn, pnotes);
12004      return;
12005    }
12006
12007  else if (GET_CODE (x) == CLOBBER)
12008    return;
12009
12010  len = GET_RTX_LENGTH (code);
12011  fmt = GET_RTX_FORMAT (code);
12012
12013  for (i = 0; i < len; i++)
12014    {
12015      if (fmt[i] == 'E')
12016	{
12017	  int j;
12018	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12019	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
12020			 to_insn, pnotes);
12021	}
12022      else if (fmt[i] == 'e')
12023	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
12024    }
12025}
12026
12027/* Return 1 if X is the target of a bit-field assignment in BODY, the
12028   pattern of an insn.  X must be a REG.  */
12029
12030static int
12031reg_bitfield_target_p (x, body)
12032     rtx x;
12033     rtx body;
12034{
12035  int i;
12036
12037  if (GET_CODE (body) == SET)
12038    {
12039      rtx dest = SET_DEST (body);
12040      rtx target;
12041      unsigned int regno, tregno, endregno, endtregno;
12042
12043      if (GET_CODE (dest) == ZERO_EXTRACT)
12044	target = XEXP (dest, 0);
12045      else if (GET_CODE (dest) == STRICT_LOW_PART)
12046	target = SUBREG_REG (XEXP (dest, 0));
12047      else
12048	return 0;
12049
12050      if (GET_CODE (target) == SUBREG)
12051	target = SUBREG_REG (target);
12052
12053      if (GET_CODE (target) != REG)
12054	return 0;
12055
12056      tregno = REGNO (target), regno = REGNO (x);
12057      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12058	return target == x;
12059
12060      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
12061      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
12062
12063      return endregno > tregno && regno < endtregno;
12064    }
12065
12066  else if (GET_CODE (body) == PARALLEL)
12067    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12068      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12069	return 1;
12070
12071  return 0;
12072}
12073
12074/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12075   as appropriate.  I3 and I2 are the insns resulting from the combination
12076   insns including FROM (I2 may be zero).
12077
12078   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12079   not need REG_DEAD notes because they are being substituted for.  This
12080   saves searching in the most common cases.
12081
12082   Each note in the list is either ignored or placed on some insns, depending
12083   on the type of note.  */
12084
12085static void
12086distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
12087     rtx notes;
12088     rtx from_insn;
12089     rtx i3, i2;
12090     rtx elim_i2, elim_i1;
12091{
12092  rtx note, next_note;
12093  rtx tem;
12094
12095  for (note = notes; note; note = next_note)
12096    {
12097      rtx place = 0, place2 = 0;
12098
12099      /* If this NOTE references a pseudo register, ensure it references
12100	 the latest copy of that register.  */
12101      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
12102	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
12103	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
12104
12105      next_note = XEXP (note, 1);
12106      switch (REG_NOTE_KIND (note))
12107	{
12108	case REG_BR_PROB:
12109	case REG_BR_PRED:
12110	case REG_EXEC_COUNT:
12111	  /* Doesn't matter much where we put this, as long as it's somewhere.
12112	     It is preferable to keep these notes on branches, which is most
12113	     likely to be i3.  */
12114	  place = i3;
12115	  break;
12116
12117	case REG_VTABLE_REF:
12118	  /* ??? Should remain with *a particular* memory load.  Given the
12119	     nature of vtable data, the last insn seems relatively safe.  */
12120	  place = i3;
12121	  break;
12122
12123	case REG_NON_LOCAL_GOTO:
12124	  if (GET_CODE (i3) == JUMP_INSN)
12125	    place = i3;
12126	  else if (i2 && GET_CODE (i2) == JUMP_INSN)
12127	    place = i2;
12128	  else
12129	    abort ();
12130	  break;
12131
12132	case REG_EH_REGION:
12133	  /* These notes must remain with the call or trapping instruction.  */
12134	  if (GET_CODE (i3) == CALL_INSN)
12135	    place = i3;
12136	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12137	    place = i2;
12138	  else if (flag_non_call_exceptions)
12139	    {
12140	      if (may_trap_p (i3))
12141		place = i3;
12142	      else if (i2 && may_trap_p (i2))
12143		place = i2;
12144	      /* ??? Otherwise assume we've combined things such that we
12145		 can now prove that the instructions can't trap.  Drop the
12146		 note in this case.  */
12147	    }
12148	  else
12149	    abort ();
12150	  break;
12151
12152	case REG_NORETURN:
12153	case REG_SETJMP:
12154	  /* These notes must remain with the call.  It should not be
12155	     possible for both I2 and I3 to be a call.  */
12156	  if (GET_CODE (i3) == CALL_INSN)
12157	    place = i3;
12158	  else if (i2 && GET_CODE (i2) == CALL_INSN)
12159	    place = i2;
12160	  else
12161	    abort ();
12162	  break;
12163
12164	case REG_UNUSED:
12165	  /* Any clobbers for i3 may still exist, and so we must process
12166	     REG_UNUSED notes from that insn.
12167
12168	     Any clobbers from i2 or i1 can only exist if they were added by
12169	     recog_for_combine.  In that case, recog_for_combine created the
12170	     necessary REG_UNUSED notes.  Trying to keep any original
12171	     REG_UNUSED notes from these insns can cause incorrect output
12172	     if it is for the same register as the original i3 dest.
12173	     In that case, we will notice that the register is set in i3,
12174	     and then add a REG_UNUSED note for the destination of i3, which
12175	     is wrong.  However, it is possible to have REG_UNUSED notes from
12176	     i2 or i1 for registers which were both used and clobbered, so
12177	     we keep notes from i2 or i1 if they will turn into REG_DEAD
12178	     notes.  */
12179
12180	  /* If this register is set or clobbered in I3, put the note there
12181	     unless there is one already.  */
12182	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
12183	    {
12184	      if (from_insn != i3)
12185		break;
12186
12187	      if (! (GET_CODE (XEXP (note, 0)) == REG
12188		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12189		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12190		place = i3;
12191	    }
12192	  /* Otherwise, if this register is used by I3, then this register
12193	     now dies here, so we must put a REG_DEAD note here unless there
12194	     is one already.  */
12195	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
12196		   && ! (GET_CODE (XEXP (note, 0)) == REG
12197			 ? find_regno_note (i3, REG_DEAD,
12198					    REGNO (XEXP (note, 0)))
12199			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12200	    {
12201	      PUT_REG_NOTE_KIND (note, REG_DEAD);
12202	      place = i3;
12203	    }
12204	  break;
12205
12206	case REG_EQUAL:
12207	case REG_EQUIV:
12208	case REG_NOALIAS:
12209	  /* These notes say something about results of an insn.  We can
12210	     only support them if they used to be on I3 in which case they
12211	     remain on I3.  Otherwise they are ignored.
12212
12213	     If the note refers to an expression that is not a constant, we
12214	     must also ignore the note since we cannot tell whether the
12215	     equivalence is still true.  It might be possible to do
12216	     slightly better than this (we only have a problem if I2DEST
12217	     or I1DEST is present in the expression), but it doesn't
12218	     seem worth the trouble.  */
12219
12220	  if (from_insn == i3
12221	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
12222	    place = i3;
12223	  break;
12224
12225	case REG_INC:
12226	case REG_NO_CONFLICT:
12227	  /* These notes say something about how a register is used.  They must
12228	     be present on any use of the register in I2 or I3.  */
12229	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12230	    place = i3;
12231
12232	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12233	    {
12234	      if (place)
12235		place2 = i2;
12236	      else
12237		place = i2;
12238	    }
12239	  break;
12240
12241	case REG_LABEL:
12242	  /* This can show up in several ways -- either directly in the
12243	     pattern, or hidden off in the constant pool with (or without?)
12244	     a REG_EQUAL note.  */
12245	  /* ??? Ignore the without-reg_equal-note problem for now.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
	      || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
		  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
	    place = i3;

	  if (i2
	      && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
		  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
		      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
		      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }

	  /* Don't attach REG_LABEL note to a JUMP_INSN which has
	     JUMP_LABEL already.  Instead, decrement LABEL_NUSES.  */
	  if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
	    {
	      if (JUMP_LABEL (place) != XEXP (note, 0))
		abort ();
	      if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
		LABEL_NUSES (JUMP_LABEL (place))--;
	      place = 0;
	    }
	  if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
	    {
	      if (JUMP_LABEL (place2) != XEXP (note, 0))
		abort ();
	      if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
		LABEL_NUSES (JUMP_LABEL (place2))--;
	      place2 = 0;
	    }
	  break;

	case REG_NONNEG:
	case REG_WAS_0:
	  /* These notes say something about the value of a register prior
	     to the execution of an insn.  It is too much trouble to see
	     if the note is still correct in all situations.  It is better
	     to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
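	  /* An illustrative aside: in a libcall block the first insn
	     carries a REG_LIBCALL note pointing at the last insn, and
	     the last insn carries a REG_RETVAL note pointing back at
	     the first, so the pair must be kept consistent whenever
	     either insn is moved or deleted.  */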
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	      /* If we're deleting the last remaining instruction of a
		 libcall sequence, don't add the notes.  */
	      else if (XEXP (note, 0) == from_insn)
		tem = place = 0;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */
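
	  /* An illustrative sketch, with hypothetical register numbers:
	     if I3 was (set (reg 70) (and (reg 68) (reg 69))) and we
	     discover that reg 69 is always zero, the combined insn
	     becomes (set (reg 70) (const_int 0)), and the REG_DEAD note
	     for reg 68 must migrate back to reg 68's previous use.  */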

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (rtx_equal_p (XEXP (note, 0), elim_i2)
	      || rtx_equal_p (XEXP (note, 0), elim_i1))
	    break;

	  if (place == 0)
	    {
	      basic_block bb = BASIC_BLOCK (this_basic_block);

	      for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
		{
		  if (! INSN_P (tem))
		    {
		      if (tem == bb->head)
			break;
		      continue;
		    }

		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
#ifdef HAVE_cc0
		      rtx cc0_setter = NULL_RTX;
#endif

		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     (GET_CODE (inner_dest) == STRICT_LOW_PART
			      || GET_CODE (inner_dest) == SUBREG
			      || GET_CODE (inner_dest) == ZERO_EXTRACT);
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber, that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter)
				= NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put
			     one here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }

		  if (tem == bb->head)
		    break;
		}

	      /* We haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit the beginning
		 of the block.  If the existing life info says the reg
		 was dead, there's nothing left to do.  Otherwise, we'll
		 need to do a global life update after combine.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
		  && REGNO_REG_SET_P (bb->global_live_at_start,
				      REGNO (XEXP (note, 0))))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     We check here whether it is set at all, not whether it is
	     totally replaced, which is what `dead_or_set_p' checks, so we
	     also check for it being set partially.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      unsigned int regno = REGNO (XEXP (note, 0));

	      /* Similarly, if the instruction on which we want to place
		 the note is a noop, we'll need to do a global live update
		 after it is removed in delete_noop_moves.  */
	      if (noop_move_p (place))
		{
		  SET_BIT (refresh_blocks, this_basic_block);
		  need_refresh = 1;
		}

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must arrange for an appropriate REG_DEAD
		 note to be added for it.  However, we can't just emit a USE
		 and attach the note to it, since the register might actually
		 be dead; so we recurse, and the recursive call then finds
		 the previous insn that used this register.  */
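
	      /* An illustrative sketch, with hypothetical register
		 numbers: a DImode value occupying hard regs 0 and 1
		 whose high word (reg 1) is no longer used needs its own
		 REG_DEAD note for reg 1, placed at reg 1's last
		 preceding use.  */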

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  unsigned int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  unsigned int i;

		  for (i = regno; i < endregno; i++)
		    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			 && ! find_regno_fusage (place, USE, i))
			|| dead_or_set_regno_p (place, i))
		      all_used = 0;

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 not already dead or set.  */

		      for (i = regno; i < endregno;
			   i += HARD_REGNO_NREGS (i, reg_raw_mode[i]))
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			  basic_block bb = BASIC_BLOCK (this_basic_block);

			  if (! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    {
			      rtx new_note
				= gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

			      distribute_notes (new_note, place, place,
						NULL_RTX, NULL_RTX, NULL_RTX);
			    }
			  else if (! refers_to_regno_p (i, i + 1,
							PATTERN (place), 0)
				   && ! find_regno_fusage (place, USE, i))
			    for (tem = PREV_INSN (place); ;
				 tem = PREV_INSN (tem))
			      {
				if (! INSN_P (tem))
				  {
				    if (tem == bb->head)
				      {
					SET_BIT (refresh_blocks,
						 this_basic_block);
					need_refresh = 1;
					break;
				      }
				    continue;
				  }
				if (dead_or_set_p (tem, piece)
				    || reg_bitfield_target_p (piece,
							      PATTERN (tem)))
				  {
				    REG_NOTES (tem)
				      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
							   REG_NOTES (tem));
				    break;
				  }
			      }

			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only way that could happen is if
	 we were to replace I3, I2, and I1 by I3 and I2.  But in that case
	 the destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */
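
      /* An illustrative sketch, with hypothetical insns: if the link's
	 target is I1: (set (reg 66) ...) and the first later use of
	 reg 66 in this block is I4: (set (reg 67) (plus (reg 66)
	 (const_int 1))), the link is re-attached to I4's LOG_LINKS,
	 recording I1 as the insn that sets reg 66.  */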

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || BLOCK_HEAD (this_basic_block + 1) != insn));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}

/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}

void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
12778