combine.c (18334) vs. combine.c (50397): compacted side-by-side diff. Unchanged text appears once with the old revision's line numbers; where the file changed, the old revision's lines are immediately followed by the new revision's lines.
1/* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
53 flow.c isn't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
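/* Editor's illustration (not part of either revision): a typical
   two-insn combination of the kind described above.  Given

	(set (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))
	(set (reg:SI 102) (plus:SI (reg:SI 100) (reg:SI 103)))

   where the second insn has a LOG_LINK back to the first and reg 100
   is not used elsewhere, substituting the first SET's source into the
   second yields

	(set (reg:SI 102) (plus:SI (mult:SI (reg:SI 101) (const_int 4))
				   (reg:SI 103)))

   If the machine description recognizes the result (say, as a
   scaled-index add), it is installed and the first insn is deleted.  */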
76
77#include "config.h"
78#ifdef __STDC__
79#include <stdarg.h>
80#else
81#include <varargs.h>
82#endif
83
84/* Must precede rtl.h for FFS. */
85#include <stdio.h>
84/* stdio.h must precede rtl.h for FFS. */
85#include "system.h"
86
87#include "rtl.h"
88#include "flags.h"
89#include "regs.h"
90#include "hard-reg-set.h"
91#include "expr.h"
92#include "basic-block.h"
93#include "insn-config.h"
91#include "basic-block.h"
92#include "insn-config.h"
93/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
94#include "expr.h"
94#include "insn-flags.h"
95#include "insn-codes.h"
96#include "insn-attr.h"
97#include "recog.h"
98#include "real.h"
100#include "toplev.h"
99
100/* It is not safe to use ordinary gen_lowpart in combine.
101 Use gen_lowpart_for_combine instead. See comments there. */
102#define gen_lowpart dont_use_gen_lowpart_you_dummy
103
104/* Number of attempts to combine instructions in this function. */
105
106static int combine_attempts;
107
108/* Number of attempts that got as far as substitution in this function. */
109
110static int combine_merges;
111
112/* Number of instructions combined with added SETs in this function. */
113
114static int combine_extras;
115
116/* Number of instructions combined in this function. */
117
118static int combine_successes;
119
120/* Totals over entire compilation. */
121
122static int total_attempts, total_merges, total_extras, total_successes;
123
124/* Define a default value for REVERSIBLE_CC_MODE.
125 We can never assume that a condition code mode is safe to reverse unless
126 the md tells us so. */
127#ifndef REVERSIBLE_CC_MODE
128#define REVERSIBLE_CC_MODE(MODE) 0
129#endif
130
131/* Vector mapping INSN_UIDs to cuids.
132 The cuids are like uids but increase monotonically always.
133 Combine always uses cuids so that it can compare them.
134 But actually renumbering the uids, which we used to do,
135 proves to be a bad idea because it makes it hard to compare
136 the dumps produced by earlier passes with those from later passes. */
137
138static int *uid_cuid;
139static int max_uid_cuid;
140
141/* Get the cuid of an insn. */
142
143#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid_cuid \
144 ? (abort(), 0) \
145 : uid_cuid[INSN_UID (INSN)])
145#define INSN_CUID(INSN) \
146(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
146
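/* Editor's sketch (not in either revision): cuids, unlike uids, can be
   compared to order insns, since insns created late get arbitrary uids.
   A hypothetical helper using the macro above: */

static int
insn_precedes_p (a, b)
     rtx a, b;
{
  /* Cuids follow the insn chain, so this comparison is meaningful even
     when INSN_UID (a) > INSN_UID (b).  */
  return INSN_CUID (a) < INSN_CUID (b);
}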
147/* Maximum register number, which is the size of the tables below. */
148
149static int combine_max_regno;
150
151/* Record last point of death of (hard or pseudo) register n. */
152
153static rtx *reg_last_death;
154
155/* Record last point of modification of (hard or pseudo) register n. */
156
157static rtx *reg_last_set;
158
159/* Record the cuid of the last insn that invalidated memory
160 (anything that writes memory, and subroutine calls, but not pushes). */
161
162static int mem_last_set;
163
164/* Record the cuid of the last CALL_INSN
165 so we can tell whether a potential combination crosses any calls. */
166
167static int last_call_cuid;
168
169/* When `subst' is called, this is the insn that is being modified
170 (by combining in a previous insn). The PATTERN of this insn
171 is still the old pattern partially modified and it should not be
172 looked at, but this may be used to examine the successors of the insn
173 to judge whether a simplification is valid. */
174
175static rtx subst_insn;
176
177/* This is an insn that belongs before subst_insn, but is not currently
178 on the insn chain. */
179
180static rtx subst_prev_insn;
181
182/* This is the lowest CUID that `subst' is currently dealing with.
183 get_last_value will not return a value if the register was set at or
184 after this CUID. If not for this mechanism, we could get confused if
185 I2 or I1 in try_combine were an insn that used the old value of a register
186 to obtain a new value. In that case, we might erroneously get the
187 new value of the register when we wanted the old one. */
188
189static int subst_low_cuid;
190
191/* This contains any hard registers that are used in newpat; reg_dead_at_p
192 must consider all these registers to be always live. */
193
194static HARD_REG_SET newpat_used_regs;
195
196/* This is an insn to which a LOG_LINKS entry has been added. If this
197 insn is earlier than I2 or I3, combine should rescan starting at
198 that location. */
199
200static rtx added_links_insn;
201
202/* This is the value of undobuf.num_undo when we started processing this
203 substitution. This will prevent gen_rtx_combine from re-using a piece
204 from the previous expression. Doing so can produce circular rtl
205 structures. */
206
207static int previous_num_undos;
208
209/* Basic block number of the block in which we are performing combines. */
210static int this_basic_block;
211
212/* The next group of arrays allows the recording of the last value assigned
213 to (hard or pseudo) register n. We use this information to see if an
214 operation being processed is redundant given a prior operation performed
215 on the register. For example, an `and' with a constant is redundant if
216 all the zero bits are already known to be turned off.
217
218 We use an approach similar to that used by cse, but change it in the
219 following ways:
220
221 (1) We do not want to reinitialize at each label.
222 (2) It is useful, but not critical, to know the actual value assigned
223 to a register. Often just its form is helpful.
224
225 Therefore, we maintain the following arrays:
226
227 reg_last_set_value the last value assigned
228 reg_last_set_label records the value of label_tick when the
229 register was assigned
230 reg_last_set_table_tick records the value of label_tick when a
231 value using the register is assigned
232 reg_last_set_invalid set to non-zero when it is not valid
233 to use the value of this register in some
234 register's value
235
236 To understand the usage of these tables, it is important to understand
237 the distinction between the value in reg_last_set_value being valid
238 and the register being validly contained in some other expression in the
239 table.
240
241 Entry I in reg_last_set_value is valid if it is non-zero, and either
242 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
243
244 Register I may validly appear in any expression returned for the value
245 of another register if reg_n_sets[i] is 1. It may also appear in the
246 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
247 reg_last_set_invalid[j] is zero.
248
249 If an expression is found in the table containing a register which may
250 not validly appear in an expression, the register is replaced by
251 something that won't match, (clobber (const_int 0)).
252
253 reg_last_set_invalid[i] is set non-zero when register I is being assigned
254 to and reg_last_set_table_tick[i] == label_tick. */
255
256/* Record last value assigned to (hard or pseudo) register n. */
257
258static rtx *reg_last_set_value;
259
260/* Record the value of label_tick when the value for register n is placed in
261 reg_last_set_value[n]. */
262
263static int *reg_last_set_label;
264
265/* Record the value of label_tick when an expression involving register n
266 is placed in reg_last_set_value. */
267
268static int *reg_last_set_table_tick;
269
270/* Set non-zero if references to register n in expressions should not be
271 used. */
272
273static char *reg_last_set_invalid;
274
275/* Incremented for each label. */
276
277static int label_tick;
278
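/* Editor's sketch (not in either revision): the validity rule stated
   above for entry I of reg_last_set_value, written out as a predicate.
   The helper name is hypothetical; reg_n_sets is the per-register set
   count maintained by flow.  */

static int
last_set_value_valid_p (i)
     int i;
{
  return (reg_last_set_value[i] != 0
	  && (reg_n_sets[i] == 1 || reg_last_set_label[i] == label_tick));
}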
279/* Some registers that are set more than once and used in more than one
280 basic block are nevertheless always set in similar ways. For example,
281 a QImode register may be loaded from memory in two places on a machine
282 where byte loads zero extend.
283
284 We record in the following array what we know about the nonzero
285 bits of a register, specifically which bits are known to be zero.
286
287 If an entry is zero, it means that we don't know anything special. */
288
289static unsigned HOST_WIDE_INT *reg_nonzero_bits;
290
291/* Mode used to compute significance in reg_nonzero_bits. It is the largest
292 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
293
294static enum machine_mode nonzero_bits_mode;
295
296/* Nonzero if we know that a register has some leading bits that are always
297 equal to the sign bit. */
298
299static char *reg_sign_bit_copies;
300
301/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
302 It is zero while computing them and after combine has completed. The
303 former test prevents propagating values based on previously set values,
304 which can be incorrect if a variable is modified in a loop. */
305
306static int nonzero_sign_valid;
307
308/* These arrays are maintained in parallel with reg_last_set_value
309 and are used to store the mode in which the register was last set,
310 the bits that were known to be zero when it was last set, and the
311 number of sign bit copies it was known to have when it was last set. */
312
313static enum machine_mode *reg_last_set_mode;
314static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
315static char *reg_last_set_sign_bit_copies;
316
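/* Editor's example (not in either revision): on a machine where byte
   loads zero extend, a QImode pseudo R that is only ever loaded from
   memory gets reg_nonzero_bits[REGNO (R)] == 0xff; bits 8 and up are
   known zero, so a later (and:SI R (const_int 255)) is redundant.  A
   hypothetical form of that test: */

static int
and_mask_redundant_p (regno, mask)
     int regno;
     unsigned HOST_WIDE_INT mask;
{
  /* The AND changes nothing if every bit it would clear is already
     known to be zero.  */
  return (reg_nonzero_bits[regno] & ~mask) == 0;
}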
317/* Record one modification to rtl structure
318 to be undone by storing old_contents into *where.
319 is_int is 1 if the contents are an int. */
320
321struct undo
322{
317 struct undo *next;
323 int is_int;
324 union {rtx r; int i;} old_contents;
325 union {rtx *r; int *i;} where;
326};
327
328/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
329 num_undo says how many are currently recorded.
330
331 storage is nonzero if we must undo the allocation of new storage.
332 The value of storage is what to pass to obfree.
333
334 other_insn is nonzero if we have modified some other insn in the process
335 of working on subst_insn. It must be verified too. */
330 of working on subst_insn. It must be verified too.
336
337#define MAX_UNDO 50
332 previous_undos is the value of undobuf.undos when we started processing
333 this substitution. This will prevent gen_rtx_combine from re-using a piece
334 from the previous expression. Doing so can produce circular rtl
335 structures. */
338
339struct undobuf
340{
341 int num_undo;
342 char *storage;
343 struct undo undo[MAX_UNDO];
340 struct undo *undos;
341 struct undo *frees;
342 struct undo *previous_undos;
344 rtx other_insn;
345};
346
347static struct undobuf undobuf;
348
349/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
350 insn. The substitution can be undone by undo_all. If INTO is already
351 set to NEWVAL, do not record this change. Because computing NEWVAL might
352 also call SUBST, we have to compute it before we put anything into
353 the undo table. */
354
355#define SUBST(INTO, NEWVAL) \
356 do { rtx _new = (NEWVAL); \
357 if (undobuf.num_undo < MAX_UNDO) \
358 { \
359 undobuf.undo[undobuf.num_undo].is_int = 0; \
360 undobuf.undo[undobuf.num_undo].where.r = &INTO; \
361 undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
362 INTO = _new; \
363 if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
364 undobuf.num_undo++; \
365 } \
355 do { rtx _new = (NEWVAL); \
356 struct undo *_buf; \
357 \
358 if (undobuf.frees) \
359 _buf = undobuf.frees, undobuf.frees = _buf->next; \
360 else \
361 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
362 \
363 _buf->is_int = 0; \
364 _buf->where.r = &INTO; \
365 _buf->old_contents.r = INTO; \
366 INTO = _new; \
367 if (_buf->old_contents.r == INTO) \
368 _buf->next = undobuf.frees, undobuf.frees = _buf; \
369 else \
370 _buf->next = undobuf.undos, undobuf.undos = _buf; \
366 } while (0)
367
368/* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
369 expression.
370 Note that substitution for the value of a CONST_INT is not safe. */
373/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
374 for the value of a HOST_WIDE_INT value (including CONST_INT) is
375 not safe. */
371
372#define SUBST_INT(INTO, NEWVAL) \
373 do { if (undobuf.num_undo < MAX_UNDO) \
374{ \
375 undobuf.undo[undobuf.num_undo].is_int = 1; \
376 undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
377 undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
378 INTO = NEWVAL; \
379 if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
380 undobuf.num_undo++; \
381 } \
378 do { struct undo *_buf; \
379 \
380 if (undobuf.frees) \
381 _buf = undobuf.frees, undobuf.frees = _buf->next; \
382 else \
383 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
384 \
385 _buf->is_int = 1; \
386 _buf->where.i = (int *) &INTO; \
387 _buf->old_contents.i = INTO; \
388 INTO = NEWVAL; \
389 if (_buf->old_contents.i == INTO) \
390 _buf->next = undobuf.frees, undobuf.frees = _buf; \
391 else \
392 _buf->next = undobuf.undos, undobuf.undos = _buf; \
382 } while (0)
383
384/* Number of times the pseudo being substituted for
385 was found and replaced. */
386
387static int n_occurrences;
388
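/* Editor's sketch (not in either revision): how SUBST is typically
   used.  Each replacement is recorded so that undo_all can restore
   *where from old_contents if the rewritten pattern is not recognized.
   PAT and NEW_SRC here are hypothetical.  */

static void
subst_example (pat, new_src)
     rtx pat, new_src;
{
  /* Install NEW_SRC as the source of the SET, remembering the old
     source in the undo buffer.  */
  SUBST (SET_SRC (pat), new_src);
}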
389static void init_reg_last_arrays PROTO(());
390static void setup_incoming_promotions PROTO(());
400static void init_reg_last_arrays PROTO((void));
401static void setup_incoming_promotions PROTO((void));
391static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
392static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
404static int sets_function_arg_p PROTO((rtx));
393static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
394static rtx try_combine PROTO((rtx, rtx, rtx));
395static void undo_all PROTO((void));
396static rtx *find_split_point PROTO((rtx *, rtx));
397static rtx subst PROTO((rtx, rtx, rtx, int, int));
398static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
399static rtx simplify_if_then_else PROTO((rtx));
400static rtx simplify_set PROTO((rtx));
401static rtx simplify_logical PROTO((rtx, int));
402static rtx expand_compound_operation PROTO((rtx));
403static rtx expand_field_assignment PROTO((rtx));
404static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
405 int, int, int));
406static rtx extract_left_shift PROTO((rtx, int));
407static rtx make_compound_operation PROTO((rtx, enum rtx_code));
408static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
409static rtx force_to_mode PROTO((rtx, enum machine_mode,
410 unsigned HOST_WIDE_INT, rtx, int));
411static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
412static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
425static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
413static rtx make_field_assignment PROTO((rtx));
414static rtx apply_distributive_law PROTO((rtx));
415static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
416 unsigned HOST_WIDE_INT));
417static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
418static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
419static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
420 enum rtx_code, HOST_WIDE_INT,
421 enum machine_mode, int *));
422static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
423 rtx, int));
424static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
425static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
426static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
427 ...));
428static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
429 rtx, rtx));
430static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
431 enum machine_mode, rtx));
432static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
433static int reversible_comparison_p PROTO((rtx));
434static void update_table_tick PROTO((rtx));
435static void record_value_for_reg PROTO((rtx, rtx, rtx));
436static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
437static void record_dead_and_set_regs PROTO((rtx));
438static int get_last_value_validate PROTO((rtx *, int, int));
451static int get_last_value_validate PROTO((rtx *, rtx, int, int));
439static rtx get_last_value PROTO((rtx));
440static int use_crosses_set_p PROTO((rtx, int));
441static void reg_dead_at_p_1 PROTO((rtx, rtx));
442static int reg_dead_at_p PROTO((rtx, rtx));
443static void move_deaths PROTO((rtx, int, rtx, rtx *));
456static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
444static int reg_bitfield_target_p PROTO((rtx, rtx));
445static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
446static void distribute_links PROTO((rtx));
447static void mark_used_regs_combine PROTO((rtx));
461static int insn_cuid PROTO((rtx));
448
449/* Main entry point for combiner. F is the first insn of the function.
450 NREGS is the first unused pseudo-reg number. */
451
452void
453combine_instructions (f, nregs)
454 rtx f;
455 int nregs;
456{
457 register rtx insn, next, prev;
471 register rtx insn, next;
472#ifdef HAVE_cc0
473 register rtx prev;
474#endif
458 register int i;
459 register rtx links, nextlinks;
460
461 combine_attempts = 0;
462 combine_merges = 0;
463 combine_extras = 0;
464 combine_successes = 0;
465 undobuf.num_undo = previous_num_undos = 0;
482 undobuf.undos = undobuf.previous_undos = 0;
466
467 combine_max_regno = nregs;
468
469 reg_nonzero_bits
470 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
471 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
472
473 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
474 bzero (reg_sign_bit_copies, nregs * sizeof (char));
475
476 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
477 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
478 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
479 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
480 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
481 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
482 reg_last_set_mode
483 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
484 reg_last_set_nonzero_bits
485 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
486 reg_last_set_sign_bit_copies
487 = (char *) alloca (nregs * sizeof (char));
488
489 init_reg_last_arrays ();
490
491 init_recog_no_volatile ();
492
493 /* Compute maximum uid value so uid_cuid can be allocated. */
494
495 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
496 if (INSN_UID (insn) > i)
497 i = INSN_UID (insn);
498
499 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
500 max_uid_cuid = i;
501
502 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
503
504 /* Don't use reg_nonzero_bits when computing it. This can cause problems
505 when, for example, we have j <<= 1 in a loop. */
506
507 nonzero_sign_valid = 0;
508
509 /* Compute the mapping from uids to cuids.
510 Cuids are numbers assigned to insns, like uids,
511 except that cuids increase monotonically through the code.
512
513 Scan all SETs and see if we can deduce anything about what
514 bits are known to be zero for some registers and how many copies
515 of the sign bit are known to exist for those registers.
516
517 Also set any known values so that we can use them while searching
518 for what bits are known to be set. */
519
520 label_tick = 1;
521
522 /* We need to initialize it here, because record_dead_and_set_regs may call
523 get_last_value. */
524 subst_prev_insn = NULL_RTX;
525
526 setup_incoming_promotions ();
527
528 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
529 {
530 uid_cuid[INSN_UID (insn)] = ++i;
531 subst_low_cuid = i;
532 subst_insn = insn;
533
534 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
535 {
536 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
537 record_dead_and_set_regs (insn);
555
556#ifdef AUTO_INC_DEC
557 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
558 if (REG_NOTE_KIND (links) == REG_INC)
559 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
560#endif
538 }
539
540 if (GET_CODE (insn) == CODE_LABEL)
541 label_tick++;
542 }
543
544 nonzero_sign_valid = 1;
545
546 /* Now scan all the insns in forward order. */
547
548 this_basic_block = -1;
549 label_tick = 1;
550 last_call_cuid = 0;
551 mem_last_set = 0;
552 init_reg_last_arrays ();
553 setup_incoming_promotions ();
554
555 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
556 {
557 next = 0;
558
559 /* If INSN starts a new basic block, update our basic block number. */
560 if (this_basic_block + 1 < n_basic_blocks
561 && basic_block_head[this_basic_block + 1] == insn)
562 this_basic_block++;
563
564 if (GET_CODE (insn) == CODE_LABEL)
565 label_tick++;
566
567 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
568 {
569 /* Try this insn with each insn it links back to. */
570
571 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
572 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
573 goto retry;
574
575 /* Try each sequence of three linked insns ending with this one. */
576
577 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
578 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
579 nextlinks = XEXP (nextlinks, 1))
580 if ((next = try_combine (insn, XEXP (links, 0),
581 XEXP (nextlinks, 0))) != 0)
582 goto retry;
583
584#ifdef HAVE_cc0
585 /* Try to combine a jump insn that uses CC0
586 with a preceding insn that sets CC0, and maybe with its
587 logical predecessor as well.
588 This is how we make decrement-and-branch insns.
589 We need this special code because data flow connections
590 via CC0 do not get entered in LOG_LINKS. */
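	  /* Editor's example (not in either revision): on a cc0 target
	     such as the m68k, this is the path that can fuse a
	     decrement, the implicit compare against zero, and the
	     conditional branch into one dbra-style insn.  */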
591
592 if (GET_CODE (insn) == JUMP_INSN
593 && (prev = prev_nonnote_insn (insn)) != 0
594 && GET_CODE (prev) == INSN
595 && sets_cc0_p (PATTERN (prev)))
596 {
597 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
598 goto retry;
599
600 for (nextlinks = LOG_LINKS (prev); nextlinks;
601 nextlinks = XEXP (nextlinks, 1))
602 if ((next = try_combine (insn, prev,
603 XEXP (nextlinks, 0))) != 0)
604 goto retry;
605 }
606
607 /* Do the same for an insn that explicitly references CC0. */
608 if (GET_CODE (insn) == INSN
609 && (prev = prev_nonnote_insn (insn)) != 0
610 && GET_CODE (prev) == INSN
611 && sets_cc0_p (PATTERN (prev))
612 && GET_CODE (PATTERN (insn)) == SET
613 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
614 {
615 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
616 goto retry;
617
618 for (nextlinks = LOG_LINKS (prev); nextlinks;
619 nextlinks = XEXP (nextlinks, 1))
620 if ((next = try_combine (insn, prev,
621 XEXP (nextlinks, 0))) != 0)
622 goto retry;
623 }
624
625 /* Finally, see if any of the insns that this insn links to
626 explicitly references CC0. If so, try this insn, that insn,
627 and its predecessor if it sets CC0. */
628 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
629 if (GET_CODE (XEXP (links, 0)) == INSN
630 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
631 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
632 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
633 && GET_CODE (prev) == INSN
634 && sets_cc0_p (PATTERN (prev))
635 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
636 goto retry;
637#endif
638
639 /* Try combining an insn with two different insns whose results it
640 uses. */
641 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
642 for (nextlinks = XEXP (links, 1); nextlinks;
643 nextlinks = XEXP (nextlinks, 1))
644 if ((next = try_combine (insn, XEXP (links, 0),
645 XEXP (nextlinks, 0))) != 0)
646 goto retry;
647
648 if (GET_CODE (insn) != NOTE)
649 record_dead_and_set_regs (insn);
650
651 retry:
652 ;
653 }
654 }
655
656 total_attempts += combine_attempts;
657 total_merges += combine_merges;
658 total_extras += combine_extras;
659 total_successes += combine_successes;
660
661 nonzero_sign_valid = 0;
662}
663
664/* Wipe the reg_last_xxx arrays in preparation for another pass. */
665
666static void
667init_reg_last_arrays ()
668{
669 int nregs = combine_max_regno;
670
671 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
672 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
673 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
674 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
675 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
676 bzero (reg_last_set_invalid, nregs * sizeof (char));
677 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
678 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
679 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
680}
681
682/* Set up any promoted values for incoming argument registers. */
683
684static void
685setup_incoming_promotions ()
686{
687#ifdef PROMOTE_FUNCTION_ARGS
688 int regno;
689 rtx reg;
690 enum machine_mode mode;
691 int unsignedp;
692 rtx first = get_insns ();
693
694 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
695 if (FUNCTION_ARG_REGNO_P (regno)
696 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
697 record_value_for_reg (reg, first,
698 gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
699 GET_MODE (reg),
700 gen_rtx (CLOBBER, mode, const0_rtx)));
720 {
721 record_value_for_reg
722 (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
723 : SIGN_EXTEND),
724 GET_MODE (reg),
725 gen_rtx_CLOBBER (mode, const0_rtx)));
726 }
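	/* Editor's note (not in either revision): the inner
	   (clobber (const_int 0)) is the combiner's placeholder for an
	   unknown value; all that is recorded here is that the argument
	   arrives zero- or sign-extended to GET_MODE (reg).  */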
701#endif
702}
703
704/* Called via note_stores. If X is a pseudo that is used in more than
705 one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
706 set, record what bits are known zero. If we are clobbering X,
707 ignore this "set" because the clobbered value won't be used.
730/* Called via note_stores. If X is a pseudo that is narrower than
731 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
708
709 If we are setting only a portion of X and we can't figure out what
710 portion, assume all bits will be used since we don't know what will
711 be happening.
712
713 Similarly, set how many bits of X are known to be copies of the sign bit
714 at all locations in the function. This is the smallest number implied
715 by any set of X. */
716
717static void
718set_nonzero_bits_and_sign_copies (x, set)
719 rtx x;
720 rtx set;
721{
722 int num;
723
724 if (GET_CODE (x) == REG
725 && REGNO (x) >= FIRST_PSEUDO_REGISTER
726 && reg_n_sets[REGNO (x)] > 1
727 && reg_basic_block[REGNO (x)] < 0
728 /* If this register is undefined at the start of the file, we can't
729 say what its contents were. */
730 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
731 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
752 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
732 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
733 {
734 if (GET_CODE (set) == CLOBBER)
755 if (set == 0 || GET_CODE (set) == CLOBBER)
735 {
736 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
737 reg_sign_bit_copies[REGNO (x)] = 0;
758 reg_sign_bit_copies[REGNO (x)] = 1;
738 return;
739 }
740
741 /* If this is a complex assignment, see if we can convert it into a
742 simple assignment. */
743 set = expand_field_assignment (set);
744
745 /* If this is a simple assignment, or we have a paradoxical SUBREG,
746 set what we know about X. */
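      /* Editor's note (not in either revision): a paradoxical SUBREG is
	 one whose outer mode is wider than the mode of the register it
	 encloses, e.g. (subreg:SI (reg:QI 100) 0).  */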
747
748 if (SET_DEST (set) == x
749 || (GET_CODE (SET_DEST (set)) == SUBREG
750 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
751 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
752 && SUBREG_REG (SET_DEST (set)) == x))
753 {
754 rtx src = SET_SRC (set);
755
756#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
757 /* If X is narrower than a word and SRC is a non-negative
758 constant that would appear negative in the mode of X,
759 sign-extend it for use in reg_nonzero_bits because some
760 machines (maybe most) will actually do the sign-extension
761 and this is the conservative approach.
762
763 ??? For 2.5, try to tighten up the MD files in this regard
764 instead of this kludge. */
765
766 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
767 && GET_CODE (src) == CONST_INT
768 && INTVAL (src) > 0
769 && 0 != (INTVAL (src)
770 & ((HOST_WIDE_INT) 1
771 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
772 src = GEN_INT (INTVAL (src)
773 | ((HOST_WIDE_INT) (-1)
774 << GET_MODE_BITSIZE (GET_MODE (x))));
775#endif
776
777 reg_nonzero_bits[REGNO (x)]
778 |= nonzero_bits (src, nonzero_bits_mode);
779 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
780 if (reg_sign_bit_copies[REGNO (x)] == 0
781 || reg_sign_bit_copies[REGNO (x)] > num)
782 reg_sign_bit_copies[REGNO (x)] = num;
783 }
784 else
785 {
786 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
759 return;
760 }
761
762 /* If this is a complex assignment, see if we can convert it into a
763 simple assignment. */
764 set = expand_field_assignment (set);
765
766 /* If this is a simple assignment, or we have a paradoxical SUBREG,
767 set what we know about X. */
768
769 if (SET_DEST (set) == x
770 || (GET_CODE (SET_DEST (set)) == SUBREG
771 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
772 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
773 && SUBREG_REG (SET_DEST (set)) == x))
774 {
775 rtx src = SET_SRC (set);
776
777#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
778 /* If X is narrower than a word and SRC is a non-negative
779 constant that would appear negative in the mode of X,
780 sign-extend it for use in reg_nonzero_bits because some
781 machines (maybe most) will actually do the sign-extension
782 and this is the conservative approach.
783
784 ??? For 2.5, try to tighten up the MD files in this regard
785 instead of this kludge. */
786
787 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
788 && GET_CODE (src) == CONST_INT
789 && INTVAL (src) > 0
790 && 0 != (INTVAL (src)
791 & ((HOST_WIDE_INT) 1
792 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
793 src = GEN_INT (INTVAL (src)
794 | ((HOST_WIDE_INT) (-1)
795 << GET_MODE_BITSIZE (GET_MODE (x))));
796#endif
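
      /* Worked example (editorial, not in the original source): with
	 GET_MODE (x) == QImode (8 bits) and src == (const_int 128),
	 INTVAL (src) is positive but bit 7 -- the QImode sign bit -- is
	 set, so the kludge above records 128 | ((HOST_WIDE_INT) -1 << 8)
	 == -128, i.e. 0xFFFFFF80 on a 32-bit host, matching machines
	 that keep narrow immediates sign-extended in wider registers.  */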
797
798 reg_nonzero_bits[REGNO (x)]
799 |= nonzero_bits (src, nonzero_bits_mode);
800 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
801 if (reg_sign_bit_copies[REGNO (x)] == 0
802 || reg_sign_bit_copies[REGNO (x)] > num)
803 reg_sign_bit_copies[REGNO (x)] = num;
804 }
805 else
806 {
807 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
787 reg_sign_bit_copies[REGNO (x)] = 0;
808 reg_sign_bit_copies[REGNO (x)] = 1;
788 }
789 }
790}
791
792/* See if INSN can be combined into I3. PRED and SUCC are optionally
793 insns that were previously combined into I3 or that will be combined
794 into the merger of INSN and I3.
795
796 Return 0 if the combination is not allowed for any reason.
797
798 If the combination is allowed, *PDEST will be set to the single
799 destination of INSN and *PSRC to the single source, and this function
800 will return 1. */
801
802static int
803can_combine_p (insn, i3, pred, succ, pdest, psrc)
804 rtx insn;
805 rtx i3;
806 rtx pred, succ;
807 rtx *pdest, *psrc;
808{
809 int i;
810 rtx set = 0, src, dest;
809 }
810 }
811}
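
/* An editorial sketch (disabled; illustration only) of the bookkeeping
   set_nonzero_bits_and_sign_copies performs, shown for a constant V in
   an NBITS-wide mode with NBITS < HOST_BITS_PER_WIDE_INT.  The function
   name and parameters are hypothetical, not part of combine.c.  */
#if 0
static void
record_const_set_sketch (v, nbits, nonzero, sign_copies)
     HOST_WIDE_INT v;
     int nbits;
     unsigned HOST_WIDE_INT *nonzero;
     int *sign_copies;
{
  int num = 1, i;

  /* Every bit that may be set in V widens the known-nonzero mask,
     like the GET_MODE_MASK clamp used above.  */
  *nonzero |= (unsigned HOST_WIDE_INT) v
	      & (((unsigned HOST_WIDE_INT) 1 << nbits) - 1);

  /* Count the sign bit plus the leading bits duplicating it, as
     num_sign_bit_copies does; (const_int 3) in a 16-bit mode has
     bits 15..2 all zero, hence 14 copies.  */
  for (i = nbits - 2; i >= 0; i--)
    {
      if (((v >> i) & 1) != ((v >> (nbits - 1)) & 1))
	break;
      num++;
    }

  /* Record the smallest count implied by any set of the register.  */
  if (*sign_copies == 0 || *sign_copies > num)
    *sign_copies = num;
}
#endif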
812
813/* See if INSN can be combined into I3. PRED and SUCC are optionally
814 insns that were previously combined into I3 or that will be combined
815 into the merger of INSN and I3.
816
817 Return 0 if the combination is not allowed for any reason.
818
819 If the combination is allowed, *PDEST will be set to the single
820 destination of INSN and *PSRC to the single source, and this function
821 will return 1. */
822
823static int
824can_combine_p (insn, i3, pred, succ, pdest, psrc)
825 rtx insn;
826 rtx i3;
827 rtx pred, succ;
828 rtx *pdest, *psrc;
829{
830 int i;
831 rtx set = 0, src, dest;
811 rtx p, link;
832 rtx p;
833#ifdef AUTO_INC_DEC
834 rtx link;
835#endif
812 int all_adjacent = (succ ? (next_active_insn (insn) == succ
813 && next_active_insn (succ) == i3)
814 : next_active_insn (insn) == i3);
815
816 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
817 or a PARALLEL consisting of such a SET and CLOBBERs.
818
819 If INSN has CLOBBER parallel parts, ignore them for our processing.
820 By definition, these happen during the execution of the insn. When it
821 is merged with another insn, all bets are off. If they are, in fact,
822 needed and aren't also supplied in I3, they may be added by
823 recog_for_combine. Otherwise, it won't match.
824
825 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
826 note.
827
828 Get the source and destination of INSN. If more than one, can't
829 combine. */
830
831 if (GET_CODE (PATTERN (insn)) == SET)
832 set = PATTERN (insn);
833 else if (GET_CODE (PATTERN (insn)) == PARALLEL
834 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
835 {
836 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
837 {
838 rtx elt = XVECEXP (PATTERN (insn), 0, i);
839
840 switch (GET_CODE (elt))
841 {
836 int all_adjacent = (succ ? (next_active_insn (insn) == succ
837 && next_active_insn (succ) == i3)
838 : next_active_insn (insn) == i3);
839
840 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
841 or a PARALLEL consisting of such a SET and CLOBBERs.
842
843 If INSN has CLOBBER parallel parts, ignore them for our processing.
844 By definition, these happen during the execution of the insn. When it
845 is merged with another insn, all bets are off. If they are, in fact,
846 needed and aren't also supplied in I3, they may be added by
847 recog_for_combine. Otherwise, it won't match.
848
849 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
850 note.
851
852 Get the source and destination of INSN. If more than one, can't
853 combine. */
854
855 if (GET_CODE (PATTERN (insn)) == SET)
856 set = PATTERN (insn);
857 else if (GET_CODE (PATTERN (insn)) == PARALLEL
858 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
859 {
860 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
861 {
862 rtx elt = XVECEXP (PATTERN (insn), 0, i);
863
864 switch (GET_CODE (elt))
865 {
866 /* This is important to combine floating point insns
867 for the SH4 port. */
868 case USE:
869 /* Combining an isolated USE doesn't make sense.
870 We depend here on combinable_i3pat to reject them. */
871 /* The code below this loop only verifies that the inputs of
872 the SET in INSN do not change. We call reg_set_between_p
873 to verify that the REG in the USE does not change between
874 I3 and INSN.
875 If the USE in INSN was for a pseudo register, the matching
876 insn pattern will likely match any register; combining this
877 with any other USE would only be safe if we knew that the
878 used registers have identical values, or if there was
879 something to tell them apart, e.g. different modes. For
880 now, we forgo such complicated tests and simply disallow
881 combining of USEs of pseudo registers with any other USE. */
882 if (GET_CODE (XEXP (elt, 0)) == REG
883 && GET_CODE (PATTERN (i3)) == PARALLEL)
884 {
885 rtx i3pat = PATTERN (i3);
886 int i = XVECLEN (i3pat, 0) - 1;
887 int regno = REGNO (XEXP (elt, 0));
888 do
889 {
890 rtx i3elt = XVECEXP (i3pat, 0, i);
891 if (GET_CODE (i3elt) == USE
892 && GET_CODE (XEXP (i3elt, 0)) == REG
893 && (REGNO (XEXP (i3elt, 0)) == regno
894 ? reg_set_between_p (XEXP (elt, 0),
895 PREV_INSN (insn), i3)
896 : regno >= FIRST_PSEUDO_REGISTER))
897 return 0;
898 }
899 while (--i >= 0);
900 }
901 break;
902
842 /* We can ignore CLOBBERs. */
843 case CLOBBER:
844 break;
845
846 case SET:
847 /* Ignore SETs whose result isn't used but not those that
848 have side-effects. */
849 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
850 && ! side_effects_p (elt))
851 break;
852
853 /* If we have already found a SET, this is a second one and
854 so we cannot combine with this insn. */
855 if (set)
856 return 0;
857
858 set = elt;
859 break;
860
861 default:
862 /* Anything else means we can't combine. */
863 return 0;
864 }
865 }
866
867 if (set == 0
868 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
869 so don't do anything with it. */
870 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
871 return 0;
872 }
873 else
874 return 0;
875
876 if (set == 0)
877 return 0;
878
879 set = expand_field_assignment (set);
880 src = SET_SRC (set), dest = SET_DEST (set);
881
882 /* Don't eliminate a store in the stack pointer. */
883 if (dest == stack_pointer_rtx
884 /* If we couldn't eliminate a field assignment, we can't combine. */
885 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
886 /* Don't combine with an insn that sets a register to itself if it has
887 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
888 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
889 /* Can't merge a function call. */
890 || GET_CODE (src) == CALL
891 /* Don't eliminate a function call argument. */
892 || (GET_CODE (i3) == CALL_INSN
893 && (find_reg_fusage (i3, USE, dest)
894 || (GET_CODE (dest) == REG
895 && REGNO (dest) < FIRST_PSEUDO_REGISTER
896 && global_regs[REGNO (dest)])))
897 /* Don't substitute into an incremented register. */
898 || FIND_REG_INC_NOTE (i3, dest)
899 || (succ && FIND_REG_INC_NOTE (succ, dest))
900 /* Don't combine the end of a libcall into anything. */
901 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
902 /* Make sure that DEST is not used after SUCC but before I3. */
903 || (succ && ! all_adjacent
904 && reg_used_between_p (dest, succ, i3))
905 /* Make sure that the value that is to be substituted for the register
906 does not use any registers whose values alter in between. However,
907 if the insns are adjacent, a use can't cross a set even though we
908 think it might (this can happen for a sequence of insns each setting
909 the same destination; reg_last_set of that register might point to
910 a NOTE). If INSN has a REG_EQUIV note, the register is always
911 equivalent to the memory so the substitution is valid even if there
912 are intervening stores. Also, don't move a volatile asm or
913 UNSPEC_VOLATILE across any other insns. */
914 || (! all_adjacent
915 && (((GET_CODE (src) != MEM
916 || ! find_reg_note (insn, REG_EQUIV, src))
917 && use_crosses_set_p (src, INSN_CUID (insn)))
918 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
919 || GET_CODE (src) == UNSPEC_VOLATILE))
920 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
921 better register allocation by not doing the combine. */
922 || find_reg_note (i3, REG_NO_CONFLICT, dest)
923 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
924 /* Don't combine across a CALL_INSN, because that would possibly
925 change whether the life span of some REGs crosses calls or not,
926 and it is a pain to update that information.
927 Exception: if source is a constant, moving it later can't hurt.
928 Accept that special case, because it helps -fforce-addr a lot. */
929 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
930 return 0;
931
932 /* DEST must either be a REG or CC0. */
933 if (GET_CODE (dest) == REG)
934 {
935 /* If register alignment is being enforced for multi-word items in all
936 cases except for parameters, it is possible to have a register copy
937 insn referencing a hard register that is not allowed to contain the
938 mode being copied and which would not be valid as an operand of most
939 insns. Eliminate this problem by not combining with such an insn.
940
941 Also, on some machines we don't want to extend the life of a hard
903 /* We can ignore CLOBBERs. */
904 case CLOBBER:
905 break;
906
907 case SET:
908 /* Ignore SETs whose result isn't used but not those that
909 have side-effects. */
910 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
911 && ! side_effects_p (elt))
912 break;
913
914 /* If we have already found a SET, this is a second one and
915 so we cannot combine with this insn. */
916 if (set)
917 return 0;
918
919 set = elt;
920 break;
921
922 default:
923 /* Anything else means we can't combine. */
924 return 0;
925 }
926 }
927
928 if (set == 0
929 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
930 so don't do anything with it. */
931 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
932 return 0;
933 }
934 else
935 return 0;
936
937 if (set == 0)
938 return 0;
939
940 set = expand_field_assignment (set);
941 src = SET_SRC (set), dest = SET_DEST (set);
942
943 /* Don't eliminate a store in the stack pointer. */
944 if (dest == stack_pointer_rtx
945 /* If we couldn't eliminate a field assignment, we can't combine. */
946 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
947 /* Don't combine with an insn that sets a register to itself if it has
948 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
949 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
950 /* Can't merge a function call. */
951 || GET_CODE (src) == CALL
952 /* Don't eliminate a function call argument. */
953 || (GET_CODE (i3) == CALL_INSN
954 && (find_reg_fusage (i3, USE, dest)
955 || (GET_CODE (dest) == REG
956 && REGNO (dest) < FIRST_PSEUDO_REGISTER
957 && global_regs[REGNO (dest)])))
958 /* Don't substitute into an incremented register. */
959 || FIND_REG_INC_NOTE (i3, dest)
960 || (succ && FIND_REG_INC_NOTE (succ, dest))
961 /* Don't combine the end of a libcall into anything. */
962 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
963 /* Make sure that DEST is not used after SUCC but before I3. */
964 || (succ && ! all_adjacent
965 && reg_used_between_p (dest, succ, i3))
966 /* Make sure that the value that is to be substituted for the register
967 does not use any registers whose values alter in between. However,
968 if the insns are adjacent, a use can't cross a set even though we
969 think it might (this can happen for a sequence of insns each setting
970 the same destination; reg_last_set of that register might point to
971 a NOTE). If INSN has a REG_EQUIV note, the register is always
972 equivalent to the memory so the substitution is valid even if there
973 are intervening stores. Also, don't move a volatile asm or
974 UNSPEC_VOLATILE across any other insns. */
975 || (! all_adjacent
976 && (((GET_CODE (src) != MEM
977 || ! find_reg_note (insn, REG_EQUIV, src))
978 && use_crosses_set_p (src, INSN_CUID (insn)))
979 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
980 || GET_CODE (src) == UNSPEC_VOLATILE))
981 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
982 better register allocation by not doing the combine. */
983 || find_reg_note (i3, REG_NO_CONFLICT, dest)
984 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
985 /* Don't combine across a CALL_INSN, because that would possibly
986 change whether the life span of some REGs crosses calls or not,
987 and it is a pain to update that information.
988 Exception: if source is a constant, moving it later can't hurt.
989 Accept that special case, because it helps -fforce-addr a lot. */
990 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
991 return 0;
992
993 /* DEST must either be a REG or CC0. */
994 if (GET_CODE (dest) == REG)
995 {
996 /* If register alignment is being enforced for multi-word items in all
997 cases except for parameters, it is possible to have a register copy
998 insn referencing a hard register that is not allowed to contain the
999 mode being copied and which would not be valid as an operand of most
1000 insns. Eliminate this problem by not combining with such an insn.
1001
1002 Also, on some machines we don't want to extend the life of a hard
942 register. */
1003 register.
943
1004
1005 This is the same test done in can_combine_p except that we don't test
1006 if SRC is a CALL operation to permit a hard register with
1007 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
1008 into account. */
1009
944 if (GET_CODE (src) == REG
945 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
946 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
947 /* Don't extend the life of a hard register unless it is
948 a user variable (if we have few registers) or it can't
949 fit into the desired register (meaning something special
1010 if (GET_CODE (src) == REG
1011 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1012 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1013 /* Don't extend the life of a hard register unless it is
1014 a user variable (if we have few registers) or it can't
1015 fit into the desired register (meaning something special
950 is going on). */
1016 is going on).
1017 Also avoid substituting a return register into I3, because
1018 reload can't handle a conflict with constraints of other
1019 inputs. */
951 || (REGNO (src) < FIRST_PSEUDO_REGISTER
952 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
1020 || (REGNO (src) < FIRST_PSEUDO_REGISTER
1021 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
953#ifdef SMALL_REGISTER_CLASSES
954 || ! REG_USERVAR_P (src)
955#endif
956 ))))
1022 || (SMALL_REGISTER_CLASSES
1023 && ((! all_adjacent && ! REG_USERVAR_P (src))
1024 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
1025 && ! REG_USERVAR_P (src))))))))
957 return 0;
958 }
959 else if (GET_CODE (dest) != CC0)
960 return 0;
961
962 /* Don't substitute for a register intended as a clobberable operand.
963 Similarly, don't substitute an expression containing a register that
964 will be clobbered in I3. */
965 if (GET_CODE (PATTERN (i3)) == PARALLEL)
966 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
967 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
968 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
969 src)
970 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
971 return 0;
972
973 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1026 return 0;
1027 }
1028 else if (GET_CODE (dest) != CC0)
1029 return 0;
1030
1031 /* Don't substitute for a register intended as a clobberable operand.
1032 Similarly, don't substitute an expression containing a register that
1033 will be clobbered in I3. */
1034 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1035 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1036 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
1037 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1038 src)
1039 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
1040 return 0;
1041
1042 /* If INSN contains anything volatile, or is an `asm' (whether volatile
974 or not), reject, unless nothing volatile comes between it and I3,
975 with the exception of SUCC. */
1043 or not), reject, unless nothing volatile comes between it and I3. */
976
977 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1044
1045 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
978 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
979 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
980 && p != succ && volatile_refs_p (PATTERN (p)))
981 return 0;
1046 {
1047 /* Make sure succ doesn't contain a volatile reference. */
1048 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1049 return 0;
1050
1051 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1052 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1053 && p != succ && volatile_refs_p (PATTERN (p)))
1054 return 0;
1055 }
982
1056
1057 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1058 to be an explicit register variable, and was chosen for a reason. */
1059
1060 if (GET_CODE (src) == ASM_OPERANDS
1061 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1062 return 0;
1063
983 /* If there are any volatile insns between INSN and I3, reject, because
984 they might affect machine state. */
985
986 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
987 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
988 && p != succ && volatile_insn_p (PATTERN (p)))
989 return 0;
990
991 /* If INSN or I2 contains an autoincrement or autodecrement,
992 make sure that register is not used between there and I3,
993 and not already used in I3 either.
994 Also insist that I3 not be a jump; if it were one
995 and the incremented register were spilled, we would lose. */
996
997#ifdef AUTO_INC_DEC
998 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
999 if (REG_NOTE_KIND (link) == REG_INC
1000 && (GET_CODE (i3) == JUMP_INSN
1001 || reg_used_between_p (XEXP (link, 0), insn, i3)
1002 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1003 return 0;
1004#endif
1005
1006#ifdef HAVE_cc0
1007 /* Don't combine an insn that follows a CC0-setting insn.
1008 An insn that uses CC0 must not be separated from the one that sets it.
1009 We do, however, allow I2 to follow a CC0-setting insn if that insn
1010 is passed as I1; in that case it will be deleted also.
1011 We also allow combining in this case if all the insns are adjacent
1012 because that would leave the two CC0 insns adjacent as well.
1013 It would be more logical to test whether CC0 occurs inside I1 or I2,
1014 but that would be much slower, and this ought to be equivalent. */
1015
1016 p = prev_nonnote_insn (insn);
1017 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1018 && ! all_adjacent)
1019 return 0;
1020#endif
1021
1022 /* If we get here, we have passed all the tests and the combination is
1023 to be allowed. */
1024
1025 *pdest = dest;
1026 *psrc = src;
1027
1028 return 1;
1029}
1030
1064 /* If there are any volatile insns between INSN and I3, reject, because
1065 they might affect machine state. */
1066
1067 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1068 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1069 && p != succ && volatile_insn_p (PATTERN (p)))
1070 return 0;
1071
1072 /* If INSN or I2 contains an autoincrement or autodecrement,
1073 make sure that register is not used between there and I3,
1074 and not already used in I3 either.
1075 Also insist that I3 not be a jump; if it were one
1076 and the incremented register were spilled, we would lose. */
1077
1078#ifdef AUTO_INC_DEC
1079 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1080 if (REG_NOTE_KIND (link) == REG_INC
1081 && (GET_CODE (i3) == JUMP_INSN
1082 || reg_used_between_p (XEXP (link, 0), insn, i3)
1083 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1084 return 0;
1085#endif
1086
1087#ifdef HAVE_cc0
1088 /* Don't combine an insn that follows a CC0-setting insn.
1089 An insn that uses CC0 must not be separated from the one that sets it.
1090 We do, however, allow I2 to follow a CC0-setting insn if that insn
1091 is passed as I1; in that case it will be deleted also.
1092 We also allow combining in this case if all the insns are adjacent
1093 because that would leave the two CC0 insns adjacent as well.
1094 It would be more logical to test whether CC0 occurs inside I1 or I2,
1095 but that would be much slower, and this ought to be equivalent. */
1096
1097 p = prev_nonnote_insn (insn);
1098 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1099 && ! all_adjacent)
1100 return 0;
1101#endif
1102
1103 /* If we get here, we have passed all the tests and the combination is
1104 to be allowed. */
1105
1106 *pdest = dest;
1107 *psrc = src;
1108
1109 return 1;
1110}
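
/* Usage (as in try_combine below): verify I2 and I1 against I3 and
   collect their single SET_DEST/SET_SRC pairs before substituting:

	if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
	    || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2,
				       &i1dest, &i1src)))
	  return 0;  */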
1111
1112/* Check if PAT is an insn - or a part of it - used to set up an
1113 argument for a function in a hard register. */
1114
1115static int
1116sets_function_arg_p (pat)
1117 rtx pat;
1118{
1119 int i;
1120 rtx inner_dest;
1121
1122 switch (GET_CODE (pat))
1123 {
1124 case INSN:
1125 return sets_function_arg_p (PATTERN (pat));
1126
1127 case PARALLEL:
1128 for (i = XVECLEN (pat, 0); --i >= 0;)
1129 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1130 return 1;
1131
1132 break;
1133
1134 case SET:
1135 inner_dest = SET_DEST (pat);
1136 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1137 || GET_CODE (inner_dest) == SUBREG
1138 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1139 inner_dest = XEXP (inner_dest, 0);
1140
1141 return (GET_CODE (inner_dest) == REG
1142 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1143 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1144
1145 default:
1146 break;
1147 }
1148
1149 return 0;
1150}
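
/* Editorial illustration on a hypothetical target whose
   FUNCTION_ARG_REGNO_P accepts hard register 4:
   sets_function_arg_p returns 1 for
	(insn ... (set (reg:SI 4) (reg:SI 70)) ...)
   and likewise when the SET sits inside a PARALLEL or its destination
   is wrapped in a SUBREG, STRICT_LOW_PART or ZERO_EXTRACT.  */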
1151
1031/* LOC is the location within I3 that contains its pattern or the component
1032 of a PARALLEL of the pattern. We validate that it is valid for combining.
1033
1034 One problem is if I3 modifies its output, as opposed to replacing it
1035 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1036 so would produce an insn that is not equivalent to the original insns.
1037
1038 Consider:
1039
1040 (set (reg:DI 101) (reg:DI 100))
1041 (set (subreg:SI (reg:DI 101) 0) <foo>)
1042
1043 This is NOT equivalent to:
1044
1045 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1046 (set (reg:DI 101) (reg:DI 100))])
1047
1048 Not only does this modify 100 (in which case it might still be valid
1049 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1050
1051 We can also run into a problem if I2 sets a register that I1
1052 uses and I1 gets directly substituted into I3 (not via I2). In that
1053 case, we would be getting the wrong value of I2DEST into I3, so we
1054 must reject the combination. This case occurs when I2 and I1 both
1055 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1056 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1057 of a SET must prevent combination from occurring.
1058
1152/* LOC is the location within I3 that contains its pattern or the component
1153 of a PARALLEL of the pattern. We validate that it is valid for combining.
1154
1155 One problem is if I3 modifies its output, as opposed to replacing it
1156 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1157 so would produce an insn that is not equivalent to the original insns.
1158
1159 Consider:
1160
1161 (set (reg:DI 101) (reg:DI 100))
1162 (set (subreg:SI (reg:DI 101) 0) <foo>)
1163
1164 This is NOT equivalent to:
1165
1166 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1167 (set (reg:DI 101) (reg:DI 100))])
1168
1169 Not only does this modify 100 (in which case it might still be valid
1170 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1171
1172 We can also run into a problem if I2 sets a register that I1
1173 uses and I1 gets directly substituted into I3 (not via I2). In that
1174 case, we would be getting the wrong value of I2DEST into I3, so we
1175 must reject the combination. This case occurs when I2 and I1 both
1176 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1177 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1178 of a SET must prevent combination from occurring.
1179
1059 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
1180 On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
1060 if the destination of a SET is a hard register that isn't a user
1061 variable.
1062
1063 Before doing the above check, we first try to expand a field assignment
1064 into a set of logical operations.
1065
1066 If PI3DEST_KILLED is non-zero, it is a pointer to a location in which
1067 we place a register that is both set and used within I3. If more than one
1068 such register is detected, we fail.
1069
1070 Return 1 if the combination is valid, zero otherwise. */
1071
1072static int
1073combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1074 rtx i3;
1075 rtx *loc;
1076 rtx i2dest;
1077 rtx i1dest;
1078 int i1_not_in_src;
1079 rtx *pi3dest_killed;
1080{
1081 rtx x = *loc;
1082
1083 if (GET_CODE (x) == SET)
1084 {
1085 rtx set = expand_field_assignment (x);
1086 rtx dest = SET_DEST (set);
1087 rtx src = SET_SRC (set);
1181 if the destination of a SET is a hard register that isn't a user
1182 variable.
1183
1184 Before doing the above check, we first try to expand a field assignment
1185 into a set of logical operations.
1186
1187 If PI3DEST_KILLED is non-zero, it is a pointer to a location in which
1188 we place a register that is both set and used within I3. If more than one
1189 such register is detected, we fail.
1190
1191 Return 1 if the combination is valid, zero otherwise. */
1192
1193static int
1194combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1195 rtx i3;
1196 rtx *loc;
1197 rtx i2dest;
1198 rtx i1dest;
1199 int i1_not_in_src;
1200 rtx *pi3dest_killed;
1201{
1202 rtx x = *loc;
1203
1204 if (GET_CODE (x) == SET)
1205 {
1206 rtx set = expand_field_assignment (x);
1207 rtx dest = SET_DEST (set);
1208 rtx src = SET_SRC (set);
1088 rtx inner_dest = dest, inner_src = src;
1209 rtx inner_dest = dest;
1210
1211#if 0
1212 rtx inner_src = src;
1213#endif
1089
1090 SUBST (*loc, set);
1091
1092 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1093 || GET_CODE (inner_dest) == SUBREG
1094 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1095 inner_dest = XEXP (inner_dest, 0);
1096
1097 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1098 was added. */
1099#if 0
1100 while (GET_CODE (inner_src) == STRICT_LOW_PART
1101 || GET_CODE (inner_src) == SUBREG
1102 || GET_CODE (inner_src) == ZERO_EXTRACT)
1103 inner_src = XEXP (inner_src, 0);
1104
1105 /* If it is better that two different modes keep two different pseudos,
1106 avoid combining them. This avoids producing the following pattern
1107 on a 386:
1108 (set (subreg:SI (reg/v:QI 21) 0)
1109 (lshiftrt:SI (reg/v:SI 20)
1110 (const_int 24)))
1111 If that were made, reload could not handle the pair of
1112 reg 20/21, since it would try to get any GENERAL_REGS
1113 but some of them don't handle QImode. */
1114
1115 if (rtx_equal_p (inner_src, i2dest)
1116 && GET_CODE (inner_dest) == REG
1117 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1118 return 0;
1119#endif
1120
1121 /* Check for the case where I3 modifies its output, as
1122 discussed above. */
1123 if ((inner_dest != dest
1124 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1125 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1214
1215 SUBST (*loc, set);
1216
1217 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1218 || GET_CODE (inner_dest) == SUBREG
1219 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1220 inner_dest = XEXP (inner_dest, 0);
1221
1222 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1223 was added. */
1224#if 0
1225 while (GET_CODE (inner_src) == STRICT_LOW_PART
1226 || GET_CODE (inner_src) == SUBREG
1227 || GET_CODE (inner_src) == ZERO_EXTRACT)
1228 inner_src = XEXP (inner_src, 0);
1229
1230 /* If it is better that two different modes keep two different pseudos,
1231 avoid combining them. This avoids producing the following pattern
1232 on a 386:
1233 (set (subreg:SI (reg/v:QI 21) 0)
1234 (lshiftrt:SI (reg/v:SI 20)
1235 (const_int 24)))
1236 If that were made, reload could not handle the pair of
1237 reg 20/21, since it would try to get any GENERAL_REGS
1238 but some of them don't handle QImode. */
1239
1240 if (rtx_equal_p (inner_src, i2dest)
1241 && GET_CODE (inner_dest) == REG
1242 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1243 return 0;
1244#endif
1245
1246 /* Check for the case where I3 modifies its output, as
1247 discussed above. */
1248 if ((inner_dest != dest
1249 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1250 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1251
1126 /* This is the same test done in can_combine_p except that we
1127 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1252 /* This is the same test done in can_combine_p except that we
1253 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
1128 CALL operation. */
1254 CALL operation. Moreover, we can't test all_adjacent; we don't
1255 have to, since this instruction will stay in place, thus we are
1256 not considering increasing the lifetime of INNER_DEST.
1257
1258 Also, if this insn sets a function argument, combining it with
1259 something that might need a spill could clobber a previous
1260 function argument; the all_adjacent test in can_combine_p also
1261 checks this; here, we do a more specific test for this case. */
1262
1129 || (GET_CODE (inner_dest) == REG
1130 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1131 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1132 GET_MODE (inner_dest))
1263 || (GET_CODE (inner_dest) == REG
1264 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1265 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1266 GET_MODE (inner_dest))
1133#ifdef SMALL_REGISTER_CLASSES
1134 || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
1135#endif
1136 ))
1267 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
1268 && ! REG_USERVAR_P (inner_dest)
1269 && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
1270 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
1271 && i3 != 0
1272 && sets_function_arg_p (prev_nonnote_insn (i3)))))))
1137 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1138 return 0;
1139
1140 /* If DEST is used in I3, it is being killed in this insn,
1141 so record that for later.
1142 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1143 STACK_POINTER_REGNUM, since these are always considered to be
1144 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1145 if (pi3dest_killed && GET_CODE (dest) == REG
1146 && reg_referenced_p (dest, PATTERN (i3))
1147 && REGNO (dest) != FRAME_POINTER_REGNUM
1148#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1149 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1150#endif
1151#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1152 && (REGNO (dest) != ARG_POINTER_REGNUM
1153 || ! fixed_regs [REGNO (dest)])
1154#endif
1155 && REGNO (dest) != STACK_POINTER_REGNUM)
1156 {
1157 if (*pi3dest_killed)
1158 return 0;
1159
1160 *pi3dest_killed = dest;
1161 }
1162 }
1163
1164 else if (GET_CODE (x) == PARALLEL)
1165 {
1166 int i;
1167
1168 for (i = 0; i < XVECLEN (x, 0); i++)
1169 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1170 i1_not_in_src, pi3dest_killed))
1171 return 0;
1172 }
1173
1174 return 1;
1175}
1176
1177/* Try to combine the insns I1 and I2 into I3.
1178 Here I1 and I2 appear earlier than I3.
1179 I1 can be zero; then we combine just I2 into I3.
1180
1181 If we are combining three insns and the resulting insn is not recognized,
1182 try splitting it into two insns. If that happens, I2 and I3 are retained
1183 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1184 are pseudo-deleted.
1185
1186 Return 0 if the combination does not work. Then nothing is changed.
1187 If we did the combination, return the insn at which combine should
1188 resume scanning. */
1189
1190static rtx
1191try_combine (i3, i2, i1)
1192 register rtx i3, i2, i1;
1193{
1194 /* New patterns for I3 and I2, respectively. */
1195 rtx newpat, newi2pat = 0;
1196 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1197 int added_sets_1, added_sets_2;
1198 /* Total number of SETs to put into I3. */
1199 int total_sets;
1200 /* Nonzero if I2's body now appears in I3. */
1201 int i2_is_used;
1202 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1203 int insn_code_number, i2_code_number, other_code_number;
1204 /* Contains I3 if the destination of I3 is used in its source, which means
1205 that the old life of I3 is being killed. If that usage is placed into
1206 I2 and not in I3, a REG_DEAD note must be made. */
1207 rtx i3dest_killed = 0;
1208 /* SET_DEST and SET_SRC of I2 and I1. */
1209 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1210 /* PATTERN (I2), or a copy of it in certain cases. */
1211 rtx i2pat;
1212 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
1213 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1214 int i1_feeds_i3 = 0;
1215 /* Notes that must be added to REG_NOTES in I3 and I2. */
1216 rtx new_i3_notes, new_i2_notes;
1217 /* Notes that we substituted I3 into I2 instead of the normal case. */
1218 int i3_subst_into_i2 = 0;
1219 /* Notes that I1, I2 or I3 is a MULT operation. */
1220 int have_mult = 0;
1221 /* Number of clobbers of SCRATCH we had to add. */
1222 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
1223
1224 int maxreg;
1225 rtx temp;
1226 register rtx link;
1227 int i;
1228
1229 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1230 This can occur when flow deletes an insn that it has merged into an
1231 auto-increment address. We also can't do anything if I3 has a
1232 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1233 libcall. */
1234
1235 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1236 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1237 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1238 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1239 return 0;
1240
1241 combine_attempts++;
1242
1273 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1274 return 0;
1275
1276 /* If DEST is used in I3, it is being killed in this insn,
1277 so record that for later.
1278 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1279 STACK_POINTER_REGNUM, since these are always considered to be
1280 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
1281 if (pi3dest_killed && GET_CODE (dest) == REG
1282 && reg_referenced_p (dest, PATTERN (i3))
1283 && REGNO (dest) != FRAME_POINTER_REGNUM
1284#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1285 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1286#endif
1287#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1288 && (REGNO (dest) != ARG_POINTER_REGNUM
1289 || ! fixed_regs [REGNO (dest)])
1290#endif
1291 && REGNO (dest) != STACK_POINTER_REGNUM)
1292 {
1293 if (*pi3dest_killed)
1294 return 0;
1295
1296 *pi3dest_killed = dest;
1297 }
1298 }
1299
1300 else if (GET_CODE (x) == PARALLEL)
1301 {
1302 int i;
1303
1304 for (i = 0; i < XVECLEN (x, 0); i++)
1305 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1306 i1_not_in_src, pi3dest_killed))
1307 return 0;
1308 }
1309
1310 return 1;
1311}
1312
1313/* Try to combine the insns I1 and I2 into I3.
1314 Here I1 and I2 appear earlier than I3.
1315 I1 can be zero; then we combine just I2 into I3.
1316
1317 If we are combining three insns and the resulting insn is not recognized,
1318 try splitting it into two insns. If that happens, I2 and I3 are retained
1319 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1320 are pseudo-deleted.
1321
1322 Return 0 if the combination does not work. Then nothing is changed.
1323 If we did the combination, return the insn at which combine should
1324 resume scanning. */
1325
1326static rtx
1327try_combine (i3, i2, i1)
1328 register rtx i3, i2, i1;
1329{
1330 /* New patterns for I3 and I2, respectively. */
1331 rtx newpat, newi2pat = 0;
1332 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1333 int added_sets_1, added_sets_2;
1334 /* Total number of SETs to put into I3. */
1335 int total_sets;
1336 /* Nonzero if I2's body now appears in I3. */
1337 int i2_is_used;
1338 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1339 int insn_code_number, i2_code_number, other_code_number;
1340 /* Contains I3 if the destination of I3 is used in its source, which means
1341 that the old life of I3 is being killed. If that usage is placed into
1342 I2 and not in I3, a REG_DEAD note must be made. */
1343 rtx i3dest_killed = 0;
1344 /* SET_DEST and SET_SRC of I2 and I1. */
1345 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1346 /* PATTERN (I2), or a copy of it in certain cases. */
1347 rtx i2pat;
1348 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
1349 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1350 int i1_feeds_i3 = 0;
1351 /* Notes that must be added to REG_NOTES in I3 and I2. */
1352 rtx new_i3_notes, new_i2_notes;
1353 /* Notes that we substituted I3 into I2 instead of the normal case. */
1354 int i3_subst_into_i2 = 0;
1355 /* Notes that I1, I2 or I3 is a MULT operation. */
1356 int have_mult = 0;
1357 /* Number of clobbers of SCRATCH we had to add. */
1358 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
1359
1360 int maxreg;
1361 rtx temp;
1362 register rtx link;
1363 int i;
1364
1365 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1366 This can occur when flow deletes an insn that it has merged into an
1367 auto-increment address. We also can't do anything if I3 has a
1368 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1369 libcall. */
1370
1371 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1372 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1373 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1374 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1375 return 0;
1376
1377 combine_attempts++;
1378
1243 undobuf.num_undo = previous_num_undos = 0;
1379 undobuf.undos = undobuf.previous_undos = 0;
1244 undobuf.other_insn = 0;
1245
1246 /* Save the current high-water-mark so we can free storage if we didn't
1247 accept this combination. */
1248 undobuf.storage = (char *) oballoc (0);
1249
1250 /* Reset the hard register usage information. */
1251 CLEAR_HARD_REG_SET (newpat_used_regs);
1252
1253 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1254 code below, set I1 to be the earlier of the two insns. */
1255 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1256 temp = i1, i1 = i2, i2 = temp;
1257
1258 added_links_insn = 0;
1259
1260 /* First check for one important special-case that the code below will
1261 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1262 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1263 we may be able to replace that destination with the destination of I3.
1264 This occurs in the common code where we compute both a quotient and
1265 remainder into a structure, in which case we want to do the computation
1266 directly into the structure to avoid register-register copies.
1267
1268 We make very conservative checks below and only try to handle the
1269 most common cases of this. For example, we only handle the case
1270 where I2 and I3 are adjacent to avoid making difficult register
1271 usage tests. */
1272
1273 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1274 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1275 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1380 undobuf.other_insn = 0;
1381
1382 /* Save the current high-water-mark so we can free storage if we didn't
1383 accept this combination. */
1384 undobuf.storage = (char *) oballoc (0);
1385
1386 /* Reset the hard register usage information. */
1387 CLEAR_HARD_REG_SET (newpat_used_regs);
1388
1389 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1390 code below, set I1 to be the earlier of the two insns. */
1391 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1392 temp = i1, i1 = i2, i2 = temp;
1393
1394 added_links_insn = 0;
1395
1396 /* First check for one important special-case that the code below will
1397 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1398 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1399 we may be able to replace that destination with the destination of I3.
1400 This occurs in the common code where we compute both a quotient and
1401 remainder into a structure, in which case we want to do the computation
1402 directly into the structure to avoid register-register copies.
1403
1404 We make very conservative checks below and only try to handle the
1405 most common cases of this. For example, we only handle the case
1406 where I2 and I3 are adjacent to avoid making difficult register
1407 usage tests. */
1408
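      /* Editorial illustration (user-level source, not from combine.c):
	 code such as

	     struct qr { int quot; int rem; };
	     void f (struct qr *p, int x, int y)
	     {
	       p->quot = x / y;
	       p->rem = x % y;
	     }

	 can leave I2 as a multi-set divmod insn and I3 as a plain copy
	 into a structure slot; the check below then redirects the
	 matching output of I2 straight into I3's destination.  */
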
1409 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1410 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1411 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1276#ifdef SMALL_REGISTER_CLASSES
1277 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1278 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1279 || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
1280#endif
1412 && (! SMALL_REGISTER_CLASSES
1413 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1414 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1415 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
1281 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1282 && GET_CODE (PATTERN (i2)) == PARALLEL
1283 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1284 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1285 below would need to check what is inside (and reg_overlap_mentioned_p
1286 doesn't support those codes anyway). Don't allow those destinations;
1287 the resulting insn isn't likely to be recognized anyway. */
1288 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1289 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1290 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1291 SET_DEST (PATTERN (i3)))
1292 && next_real_insn (i2) == i3)
1293 {
1294 rtx p2 = PATTERN (i2);
1295
1296 /* Make sure that the destination of I3,
1297 which we are going to substitute into one output of I2,
1298 is not used within another output of I2. We must avoid making this:
1299 (parallel [(set (mem (reg 69)) ...)
1300 (set (reg 69) ...)])
1301 which is not well-defined as to order of actions.
1302 (Besides, reload can't handle output reloads for this.)
1303
1304 The problem can also happen if the dest of I3 is a memory ref,
1305 if another dest in I2 is an indirect memory ref. */
1306 for (i = 0; i < XVECLEN (p2, 0); i++)
1416 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1417 && GET_CODE (PATTERN (i2)) == PARALLEL
1418 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1419 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1420 below would need to check what is inside (and reg_overlap_mentioned_p
1421 doesn't support those codes anyway). Don't allow those destinations;
1422 the resulting insn isn't likely to be recognized anyway. */
1423 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1424 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1425 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1426 SET_DEST (PATTERN (i3)))
1427 && next_real_insn (i2) == i3)
1428 {
1429 rtx p2 = PATTERN (i2);
1430
1431 /* Make sure that the destination of I3,
1432 which we are going to substitute into one output of I2,
1433 is not used within another output of I2. We must avoid making this:
1434 (parallel [(set (mem (reg 69)) ...)
1435 (set (reg 69) ...)])
1436 which is not well-defined as to order of actions.
1437 (Besides, reload can't handle output reloads for this.)
1438
1439 The problem can also happen if the dest of I3 is a memory ref,
1440 if another dest in I2 is an indirect memory ref. */
1441 for (i = 0; i < XVECLEN (p2, 0); i++)
1307 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1442 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1443 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
1308 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1309 SET_DEST (XVECEXP (p2, 0, i))))
1310 break;
1311
1312 if (i == XVECLEN (p2, 0))
1313 for (i = 0; i < XVECLEN (p2, 0); i++)
1314 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1315 {
1316 combine_merges++;
1317
1318 subst_insn = i3;
1319 subst_low_cuid = INSN_CUID (i2);
1320
1321 added_sets_2 = added_sets_1 = 0;
1322 i2dest = SET_SRC (PATTERN (i3));
1323
1324 /* Replace the dest in I2 with our dest and make the resulting
1325 insn the new pattern for I3. Then skip to where we
1326 validate the pattern. Everything was set up above. */
1327 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1328 SET_DEST (PATTERN (i3)));
1329
1330 newpat = p2;
1331 i3_subst_into_i2 = 1;
1332 goto validate_replacement;
1333 }
1334 }
1335
1336#ifndef HAVE_cc0
1337 /* If we have no I1 and I2 looks like:
1338 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1339 (set Y OP)])
1340 make up a dummy I1 that is
1341 (set Y OP)
1342 and change I2 to be
1343 (set (reg:CC X) (compare:CC Y (const_int 0)))
1344
1345 (We can ignore any trailing CLOBBERs.)
1346
1347 This undoes a previous combination and allows us to match a branch-and-
1348 decrement insn. */
1349
1350 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1351 && XVECLEN (PATTERN (i2), 0) >= 2
1352 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1353 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1354 == MODE_CC)
1355 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1356 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1357 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1358 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1359 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1360 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1361 {
1362 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1363 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1364 break;
1365
1366 if (i == 1)
1367 {
1368 /* We make I1 with the same INSN_UID as I2. This gives it
1369 the same INSN_CUID for value tracking. Our fake I1 will
1370 never appear in the insn stream so giving it the same INSN_UID
1371 as I2 will not cause a problem. */
1372
1373 subst_prev_insn = i1
1444 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1445 SET_DEST (XVECEXP (p2, 0, i))))
1446 break;
1447
1448 if (i == XVECLEN (p2, 0))
1449 for (i = 0; i < XVECLEN (p2, 0); i++)
1450 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1451 {
1452 combine_merges++;
1453
1454 subst_insn = i3;
1455 subst_low_cuid = INSN_CUID (i2);
1456
1457 added_sets_2 = added_sets_1 = 0;
1458 i2dest = SET_SRC (PATTERN (i3));
1459
1460 /* Replace the dest in I2 with our dest and make the resulting
1461 insn the new pattern for I3. Then skip to where we
1462 validate the pattern. Everything was set up above. */
1463 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1464 SET_DEST (PATTERN (i3)));
1465
1466 newpat = p2;
1467 i3_subst_into_i2 = 1;
1468 goto validate_replacement;
1469 }
1470 }
1471
1472#ifndef HAVE_cc0
1473 /* If we have no I1 and I2 looks like:
1474 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1475 (set Y OP)])
1476 make up a dummy I1 that is
1477 (set Y OP)
1478 and change I2 to be
1479 (set (reg:CC X) (compare:CC Y (const_int 0)))
1480
1481 (We can ignore any trailing CLOBBERs.)
1482
1483 This undoes a previous combination and allows us to match a branch-and-
1484 decrement insn. */
1485
1486 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1487 && XVECLEN (PATTERN (i2), 0) >= 2
1488 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1489 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1490 == MODE_CC)
1491 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1492 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1493 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1494 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1495 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1496 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1497 {
1498 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1499 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1500 break;
1501
1502 if (i == 1)
1503 {
1504 /* We make I1 with the same INSN_UID as I2. This gives it
1505 the same INSN_CUID for value tracking. Our fake I1 will
1506 never appear in the insn stream so giving it the same INSN_UID
1507 as I2 will not cause a problem. */
1508
1509 subst_prev_insn = i1
1374 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1375 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1510 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1511 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1512 NULL_RTX);
1376
1377 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1378 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1379 SET_DEST (PATTERN (i1)));
1380 }
1381 }
1382#endif
1383
1384 /* Verify that I2 and I1 are valid for combining. */
1385 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1386 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1387 {
1388 undo_all ();
1389 return 0;
1390 }
1391
1392 /* Record whether I2DEST is used in I2SRC and similarly for the other
1393 cases. Knowing this will help in register status updating below. */
1394 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1395 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1396 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1397
1398 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1399 in I2SRC. */
1400 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1401
1402 /* Ensure that I3's pattern can be the destination of combines. */
1403 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1404 i1 && i2dest_in_i1src && i1_feeds_i3,
1405 &i3dest_killed))
1406 {
1407 undo_all ();
1408 return 0;
1409 }
1410
1411 /* See if any of the insns is a MULT operation. Unless one is, we will
1412 reject a combined insn that contains one, since it must be slower. Be conservative
1413 here. */
1414 if (GET_CODE (i2src) == MULT
1415 || (i1 != 0 && GET_CODE (i1src) == MULT)
1416 || (GET_CODE (PATTERN (i3)) == SET
1417 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1418 have_mult = 1;
1419
1420 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1421 We used to do this EXCEPT in one case: I3 has a post-inc in an
1422 output operand. However, that exception can give rise to insns like
1423 mov r3,(r3)+
1424 which is a famous insn on the PDP-11 where the value of r3 used as the
1425 source was model-dependent. Avoid this sort of thing. */
1426
1427#if 0
1428 if (!(GET_CODE (PATTERN (i3)) == SET
1429 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1430 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1431 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1432 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1433 /* It's not the exception. */
1434#endif
1435#ifdef AUTO_INC_DEC
1436 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1437 if (REG_NOTE_KIND (link) == REG_INC
1438 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1439 || (i1 != 0
1440 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1441 {
1442 undo_all ();
1443 return 0;
1444 }
1445#endif
1446
1447 /* See if the SETs in I1 or I2 need to be kept around in the merged
1448 instruction: whenever the value set there is still needed past I3.
1449 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1450
1451 For the SET in I1, we have two cases: If I1 and I2 independently
1452 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1453 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1454 in I1 needs to be kept around unless I1DEST dies or is set in either
1455 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1456 I1DEST. If so, we know I1 feeds into I2. */
1457
1458 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1459
1460 added_sets_1
1461 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1462 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1463
1464 /* If the set in I2 needs to be kept around, we must make a copy of
1465 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1466 PATTERN (I2), we are only substituting for the original I1DEST, not into
1467 an already-substituted copy. This also prevents making self-referential
1468 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1469 I2DEST. */
1470
1471 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1513
1514 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1515 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1516 SET_DEST (PATTERN (i1)));
1517 }
1518 }
1519#endif
1520
1521 /* Verify that I2 and I1 are valid for combining. */
1522 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1523 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1524 {
1525 undo_all ();
1526 return 0;
1527 }
1528
1529 /* Record whether I2DEST is used in I2SRC and similarly for the other
1530 cases. Knowing this will help in register status updating below. */
1531 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1532 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1533 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1534
1535 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1536 in I2SRC. */
1537 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1538
1539 /* Ensure that I3's pattern can be the destination of combines. */
1540 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1541 i1 && i2dest_in_i1src && i1_feeds_i3,
1542 &i3dest_killed))
1543 {
1544 undo_all ();
1545 return 0;
1546 }
1547
1548 /* See if any of the insns is a MULT operation. Unless one is, we will
1549 reject a combined insn that contains one, since it must be slower. Be conservative
1550 here. */
1551 if (GET_CODE (i2src) == MULT
1552 || (i1 != 0 && GET_CODE (i1src) == MULT)
1553 || (GET_CODE (PATTERN (i3)) == SET
1554 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1555 have_mult = 1;
1556
1557 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1558 We used to do this EXCEPT in one case: I3 has a post-inc in an
1559 output operand. However, that exception can give rise to insns like
1560 mov r3,(r3)+
1561 which is a famous insn on the PDP-11 where the value of r3 used as the
1562 source was model-dependent. Avoid this sort of thing. */
1563
1564#if 0
1565 if (!(GET_CODE (PATTERN (i3)) == SET
1566 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1567 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1568 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1569 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1570 /* It's not the exception. */
1571#endif
1572#ifdef AUTO_INC_DEC
1573 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1574 if (REG_NOTE_KIND (link) == REG_INC
1575 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1576 || (i1 != 0
1577 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1578 {
1579 undo_all ();
1580 return 0;
1581 }
1582#endif
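 /* Illustrative sketch only (register numbers invented): the rejected
    shape behind the old exception was

	(set (mem:HI (post_inc:HI (reg:HI 3)))
	     (reg:HI 3))

    i.e. "mov r3,(r3)+", where whether the old or the incremented r3
    gets stored depends on the PDP-11 model.  */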
1583
1584 /* See if the SETs in I1 or I2 need to be kept around in the merged
1585 instruction: whenever the value set there is still needed past I3.
1586 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1587
1588 For the SET in I1, we have two cases: If I1 and I2 independently
1589 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1590 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1591 in I1 needs to be kept around unless I1DEST dies or is set in either
1592 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1593 I1DEST. If so, we know I1 feeds into I2. */
1594
1595 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1596
1597 added_sets_1
1598 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1599 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
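 /* The two feed shapes distinguished above, as a sketch:

	I1 -> I3 <- I2    independent feeds; I1DEST not in I2SRC,
			  so i1_feeds_i3 is nonzero
	I1 -> I2 -> I3    chained; here a death of I1DEST in I2 is
			  already enough to omit I1's SET.  */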
1600
1601 /* If the set in I2 needs to be kept around, we must make a copy of
1602 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1603 PATTERN (I2), we are only substituting for the original I1DEST, not into
1604 an already-substituted copy. This also prevents making self-referential
1605 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1606 I2DEST. */
1607
1608 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1472 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1609 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
1473 : PATTERN (i2));
1474
1475 if (added_sets_2)
1476 i2pat = copy_rtx (i2pat);
1477
1478 combine_merges++;
1479
1480 /* Substitute in the latest insn for the regs set by the earlier ones. */
1481
1482 maxreg = max_reg_num ();
1483
1484 subst_insn = i3;
1485
1486 /* It is possible that the source of I2 or I1 may be performing an
1487 unneeded operation, such as a ZERO_EXTEND of something that is known
1488 to have the high part zero. Handle that case by letting subst look at
1489 the innermost one of them.
1490
1491 Another way to do this would be to have a function that tries to
1492 simplify a single insn instead of merging two or more insns. We don't
1493 do this because of the potential for infinite loops and because
1494 of the potential extra memory required. However, doing it the way
1495 we are is a bit of a kludge and doesn't catch all cases.
1496
1497 But only do this if -fexpensive-optimizations since it slows things down
1498 and doesn't usually win. */
1499
1500 if (flag_expensive_optimizations)
1501 {
1502 /* Pass pc_rtx so no substitutions are done, just simplifications.
1503 The cases that we are interested in here do not involve the few
1504 cases where is_replaced is checked. */
1505 if (i1)
1506 {
1507 subst_low_cuid = INSN_CUID (i1);
1508 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1509 }
1510 else
1511 {
1512 subst_low_cuid = INSN_CUID (i2);
1513 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1514 }
1515
1610 : PATTERN (i2));
1611
1612 if (added_sets_2)
1613 i2pat = copy_rtx (i2pat);
1614
1615 combine_merges++;
1616
1617 /* Substitute in the latest insn for the regs set by the earlier ones. */
1618
1619 maxreg = max_reg_num ();
1620
1621 subst_insn = i3;
1622
1623 /* It is possible that the source of I2 or I1 may be performing an
1624 unneeded operation, such as a ZERO_EXTEND of something that is known
1625 to have the high part zero. Handle that case by letting subst look at
1626 the innermost one of them.
1627
1628 Another way to do this would be to have a function that tries to
1629 simplify a single insn instead of merging two or more insns. We don't
1630 do this because of the potential for infinite loops and because
1631 of the potential extra memory required. However, doing it the way
1632 we are is a bit of a kludge and doesn't catch all cases.
1633
1634 But only do this if -fexpensive-optimizations since it slows things down
1635 and doesn't usually win. */
1636
1637 if (flag_expensive_optimizations)
1638 {
1639 /* Pass pc_rtx so no substitutions are done, just simplifications.
1640 The cases that we are interested in here do not involve the few
1641 cases where is_replaced is checked. */
1642 if (i1)
1643 {
1644 subst_low_cuid = INSN_CUID (i1);
1645 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1646 }
1647 else
1648 {
1649 subst_low_cuid = INSN_CUID (i2);
1650 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1651 }
1652
1516 previous_num_undos = undobuf.num_undo;
1653 undobuf.previous_undos = undobuf.undos;
1517 }
1518
1519#ifndef HAVE_cc0
1520 /* Many machines that don't use CC0 have insns that can both perform an
1521 arithmetic operation and set the condition code. These operations will
1522 be represented as a PARALLEL with the first element of the vector
1523 being a COMPARE of an arithmetic operation with the constant zero.
1524 The second element of the vector will set some pseudo to the result
1525 of the same arithmetic operation. If we simplify the COMPARE, we won't
1526 match such a pattern and so will generate an extra insn. Here we test
1527 for this case, where both the comparison and the operation result are
1528 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1529 I2SRC. Later we will make the PARALLEL that contains I2. */
1530
1531 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1532 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1533 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1534 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1535 {
1654 }
1655
1656#ifndef HAVE_cc0
1657 /* Many machines that don't use CC0 have insns that can both perform an
1658 arithmetic operation and set the condition code. These operations will
1659 be represented as a PARALLEL with the first element of the vector
1660 being a COMPARE of an arithmetic operation with the constant zero.
1661 The second element of the vector will set some pseudo to the result
1662 of the same arithmetic operation. If we simplify the COMPARE, we won't
1663 match such a pattern and so will generate an extra insn. Here we test
1664 for this case, where both the comparison and the operation result are
1665 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1666 I2SRC. Later we will make the PARALLEL that contains I2. */
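 /* For example (a sketch; modes and register numbers invented):

	I2: (set (reg:SI 70) (plus:SI (reg:SI 71) (reg:SI 72)))
	I3: (set (reg:CC 24) (compare:CC (reg:SI 70) (const_int 0)))

    After the replacement below and the PARALLEL built later, the
    result is roughly

	(parallel [(set (reg:CC 24)
			(compare:CC (plus:SI (reg:SI 71) (reg:SI 72))
				    (const_int 0)))
		   (set (reg:SI 70)
			(plus:SI (reg:SI 71) (reg:SI 72)))])  */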
1667
1668 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1669 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1670 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1671 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1672 {
1673#ifdef EXTRA_CC_MODES
1536 rtx *cc_use;
1537 enum machine_mode compare_mode;
1674 rtx *cc_use;
1675 enum machine_mode compare_mode;
1676#endif
1538
1539 newpat = PATTERN (i3);
1540 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1541
1542 i2_is_used = 1;
1543
1544#ifdef EXTRA_CC_MODES
1545 /* See if a COMPARE with the operand we substituted in should be done
1546 with the mode that is currently being used. If not, do the same
1547 processing we do in `subst' for a SET; namely, if the destination
1548 is used only once, try to replace it with a register of the proper
1549 mode and also replace the COMPARE. */
1550 if (undobuf.other_insn == 0
1551 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1552 &undobuf.other_insn))
1553 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1554 i2src, const0_rtx))
1555 != GET_MODE (SET_DEST (newpat))))
1556 {
1557 int regno = REGNO (SET_DEST (newpat));
1677
1678 newpat = PATTERN (i3);
1679 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1680
1681 i2_is_used = 1;
1682
1683#ifdef EXTRA_CC_MODES
1684 /* See if a COMPARE with the operand we substituted in should be done
1685 with the mode that is currently being used. If not, do the same
1686 processing we do in `subst' for a SET; namely, if the destination
1687 is used only once, try to replace it with a register of the proper
1688 mode and also replace the COMPARE. */
1689 if (undobuf.other_insn == 0
1690 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1691 &undobuf.other_insn))
1692 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1693 i2src, const0_rtx))
1694 != GET_MODE (SET_DEST (newpat))))
1695 {
1696 int regno = REGNO (SET_DEST (newpat));
1558 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1697 rtx new_dest = gen_rtx_REG (compare_mode, regno);
1559
1560 if (regno < FIRST_PSEUDO_REGISTER
1698
1699 if (regno < FIRST_PSEUDO_REGISTER
1561 || (reg_n_sets[regno] == 1 && ! added_sets_2
1700 || (REG_N_SETS (regno) == 1 && ! added_sets_2
1562 && ! REG_USERVAR_P (SET_DEST (newpat))))
1563 {
1564 if (regno >= FIRST_PSEUDO_REGISTER)
1565 SUBST (regno_reg_rtx[regno], new_dest);
1566
1567 SUBST (SET_DEST (newpat), new_dest);
1568 SUBST (XEXP (*cc_use, 0), new_dest);
1569 SUBST (SET_SRC (newpat),
1570 gen_rtx_combine (COMPARE, compare_mode,
1571 i2src, const0_rtx));
1572 }
1573 else
1574 undobuf.other_insn = 0;
1575 }
1576#endif
1577 }
1578 else
1579#endif
1580 {
1581 n_occurrences = 0; /* `subst' counts here */
1582
1583 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1584 need to make a unique copy of I2SRC each time we substitute it
1585 to avoid self-referential rtl. */
1586
1587 subst_low_cuid = INSN_CUID (i2);
1588 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1589 ! i1_feeds_i3 && i1dest_in_i1src);
1701 && ! REG_USERVAR_P (SET_DEST (newpat))))
1702 {
1703 if (regno >= FIRST_PSEUDO_REGISTER)
1704 SUBST (regno_reg_rtx[regno], new_dest);
1705
1706 SUBST (SET_DEST (newpat), new_dest);
1707 SUBST (XEXP (*cc_use, 0), new_dest);
1708 SUBST (SET_SRC (newpat),
1709 gen_rtx_combine (COMPARE, compare_mode,
1710 i2src, const0_rtx));
1711 }
1712 else
1713 undobuf.other_insn = 0;
1714 }
1715#endif
1716 }
1717 else
1718#endif
1719 {
1720 n_occurrences = 0; /* `subst' counts here */
1721
1722 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1723 need to make a unique copy of I2SRC each time we substitute it
1724 to avoid self-referential rtl. */
1725
1726 subst_low_cuid = INSN_CUID (i2);
1727 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1728 ! i1_feeds_i3 && i1dest_in_i1src);
1590 previous_num_undos = undobuf.num_undo;
1729 undobuf.previous_undos = undobuf.undos;
1591
1592 /* Record whether i2's body now appears within i3's body. */
1593 i2_is_used = n_occurrences;
1594 }
1595
1596 /* If we already got a failure, don't try to do more. Otherwise,
1597 try to substitute in I1 if we have it. */
1598
1599 if (i1 && GET_CODE (newpat) != CLOBBER)
1600 {
1601 /* Before we can do this substitution, we must redo the test done
1602 above (see detailed comments there) that ensures that I1DEST
1730
1731 /* Record whether i2's body now appears within i3's body. */
1732 i2_is_used = n_occurrences;
1733 }
1734
1735 /* If we already got a failure, don't try to do more. Otherwise,
1736 try to substitute in I1 if we have it. */
1737
1738 if (i1 && GET_CODE (newpat) != CLOBBER)
1739 {
1740 /* Before we can do this substitution, we must redo the test done
1741 above (see detailed comments there) that ensures that I1DEST
1603 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1742 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1604
1605 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1606 0, NULL_PTR))
1607 {
1608 undo_all ();
1609 return 0;
1610 }
1611
1612 n_occurrences = 0;
1613 subst_low_cuid = INSN_CUID (i1);
1614 newpat = subst (newpat, i1dest, i1src, 0, 0);
1743
1744 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1745 0, NULL_PTR))
1746 {
1747 undo_all ();
1748 return 0;
1749 }
1750
1751 n_occurrences = 0;
1752 subst_low_cuid = INSN_CUID (i1);
1753 newpat = subst (newpat, i1dest, i1src, 0, 0);
1615 previous_num_undos = undobuf.num_undo;
1754 undobuf.previous_undos = undobuf.undos;
1616 }
1617
1618 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1619 to count all the ways that I2SRC and I1SRC can be used. */
1620 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1621 && i2_is_used + added_sets_2 > 1)
1622 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1623 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1624 > 1))
1625 /* Fail if we tried to make a new register (we used to abort, but there's
1626 really no reason to). */
1627 || max_reg_num () != maxreg
1628 /* Fail if we couldn't do something and have a CLOBBER. */
1629 || GET_CODE (newpat) == CLOBBER
1630 /* Fail if this new pattern is a MULT and we didn't have one before
1631 at the outer level. */
1632 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1633 && ! have_mult))
1634 {
1635 undo_all ();
1636 return 0;
1637 }
1638
1639 /* If the actions of the earlier insns must be kept
1640 in addition to substituting them into the latest one,
1641 we must make a new PARALLEL for the latest insn
1642 to hold the additional SETs. */
1643
1644 if (added_sets_1 || added_sets_2)
1645 {
1646 combine_extras++;
1647
1648 if (GET_CODE (newpat) == PARALLEL)
1649 {
1650 rtvec old = XVEC (newpat, 0);
1651 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1755 }
1756
1757 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1758 to count all the ways that I2SRC and I1SRC can be used. */
1759 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1760 && i2_is_used + added_sets_2 > 1)
1761 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1762 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1763 > 1))
1764 /* Fail if we tried to make a new register (we used to abort, but there's
1765 really no reason to). */
1766 || max_reg_num () != maxreg
1767 /* Fail if we couldn't do something and have a CLOBBER. */
1768 || GET_CODE (newpat) == CLOBBER
1769 /* Fail if this new pattern is a MULT and we didn't have one before
1770 at the outer level. */
1771 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1772 && ! have_mult))
1773 {
1774 undo_all ();
1775 return 0;
1776 }
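 /* E.g. (sketch): if I2 carries a REG_INC note for (reg 3) and I2SRC
    now appears twice in NEWPAT, or once plus the copy kept because of
    ADDED_SETS_2, the POST_INC side effect would execute twice, which
    is why the counts above must not exceed 1.  */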
1777
1778 /* If the actions of the earlier insns must be kept
1779 in addition to substituting them into the latest one,
1780 we must make a new PARALLEL for the latest insn
1781 to hold the additional SETs. */
1782
1783 if (added_sets_1 || added_sets_2)
1784 {
1785 combine_extras++;
1786
1787 if (GET_CODE (newpat) == PARALLEL)
1788 {
1789 rtvec old = XVEC (newpat, 0);
1790 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1652 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1653 bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1791 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1792 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
1654 sizeof (old->elem[0]) * old->num_elem);
1655 }
1656 else
1657 {
1658 rtx old = newpat;
1659 total_sets = 1 + added_sets_1 + added_sets_2;
1793 sizeof (old->elem[0]) * old->num_elem);
1794 }
1795 else
1796 {
1797 rtx old = newpat;
1798 total_sets = 1 + added_sets_1 + added_sets_2;
1660 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1799 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
1661 XVECEXP (newpat, 0, 0) = old;
1662 }
1663
1664 if (added_sets_1)
1665 XVECEXP (newpat, 0, --total_sets)
1666 = (GET_CODE (PATTERN (i1)) == PARALLEL
1800 XVECEXP (newpat, 0, 0) = old;
1801 }
1802
1803 if (added_sets_1)
1804 XVECEXP (newpat, 0, --total_sets)
1805 = (GET_CODE (PATTERN (i1)) == PARALLEL
1667 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1806 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
1668
1669 if (added_sets_2)
1670 {
1671 /* If there is no I1, use I2's body as is. We used to also not do
1672 the subst call below if I2 was substituted into I3,
1673 but that could lose a simplification. */
1674 if (i1 == 0)
1675 XVECEXP (newpat, 0, --total_sets) = i2pat;
1676 else
1677 /* See comment where i2pat is assigned. */
1678 XVECEXP (newpat, 0, --total_sets)
1679 = subst (i2pat, i1dest, i1src, 0, 0);
1680 }
1681 }
1682
1683 /* We come here when we are replacing a destination in I2 with the
1684 destination of I3. */
1685 validate_replacement:
1686
1687 /* Note which hard regs this insn has as inputs. */
1688 mark_used_regs_combine (newpat);
1689
1690 /* Is the result of combination a valid instruction? */
1691 insn_code_number
1692 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1693
1694 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1695 the second SET's destination is a register that is unused. In that case,
1696 we just need the first SET. This can occur when simplifying a divmod
1697 insn. We *must* test for this case here because the code below that
1698 splits two independent SETs doesn't handle this case correctly when it
1699 updates the register status. Also check the case where the first
1700 SET's destination is unused. That would not cause incorrect code, but
1701 does cause an unneeded insn to remain. */
1702
1703 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1704 && XVECLEN (newpat, 0) == 2
1705 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1706 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1707 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1708 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1709 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1710 && asm_noperands (newpat) < 0)
1711 {
1712 newpat = XVECEXP (newpat, 0, 0);
1713 insn_code_number
1714 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1715 }
1716
1717 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1718 && XVECLEN (newpat, 0) == 2
1719 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1720 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1721 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1722 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1723 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1724 && asm_noperands (newpat) < 0)
1725 {
1726 newpat = XVECEXP (newpat, 0, 1);
1727 insn_code_number
1728 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1729 }
1730
1731 /* If we were combining three insns and the result is a simple SET
1732 (with no ASM_OPERANDS) that wasn't recognized, try to split it into two
1733 insns. There are two ways to do this. It can be split using a
1734 machine-specific method (like when you have an addition of a large
1735 constant) or by combine in the function find_split_point. */
1736
1737 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1738 && asm_noperands (newpat) < 0)
1739 {
1740 rtx m_split, *split;
1741 rtx ni2dest = i2dest;
1742
1743 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1744 use I2DEST as a scratch register will help. In the latter case,
1745 convert I2DEST to the mode of the source of NEWPAT if we can. */
1746
1747 m_split = split_insns (newpat, i3);
1748
1749 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1750 inputs of NEWPAT. */
1751
1752 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1753 possible to try that as a scratch reg. This would require adding
1754 more code to make it work though. */
1755
1756 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1757 {
1758 /* If I2DEST is a hard register or the only use of a pseudo,
1759 we can change its mode. */
1760 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1761 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1762 && GET_CODE (i2dest) == REG
1763 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1807
1808 if (added_sets_2)
1809 {
1810 /* If there is no I1, use I2's body as is. We used to also not do
1811 the subst call below if I2 was substituted into I3,
1812 but that could lose a simplification. */
1813 if (i1 == 0)
1814 XVECEXP (newpat, 0, --total_sets) = i2pat;
1815 else
1816 /* See comment where i2pat is assigned. */
1817 XVECEXP (newpat, 0, --total_sets)
1818 = subst (i2pat, i1dest, i1src, 0, 0);
1819 }
1820 }
1821
1822 /* We come here when we are replacing a destination in I2 with the
1823 destination of I3. */
1824 validate_replacement:
1825
1826 /* Note which hard regs this insn has as inputs. */
1827 mark_used_regs_combine (newpat);
1828
1829 /* Is the result of combination a valid instruction? */
1830 insn_code_number
1831 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1832
1833 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1834 the second SET's destination is a register that is unused. In that case,
1835 we just need the first SET. This can occur when simplifying a divmod
1836 insn. We *must* test for this case here because the code below that
1837 splits two independent SETs doesn't handle this case correctly when it
1838 updates the register status. Also check the case where the first
1839 SET's destination is unused. That would not cause incorrect code, but
1840 does cause an unneeded insn to remain. */
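 /* A sketch of the divmod shape meant here (invented registers):

	(parallel [(set (reg:SI 73) (div:SI (reg:SI 70) (reg:SI 71)))
		   (set (reg:SI 74) (mod:SI (reg:SI 70) (reg:SI 71)))])

    With a REG_UNUSED note on I3 for (reg 74), only the first SET is
    kept and re-recognized.  */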
1841
1842 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1843 && XVECLEN (newpat, 0) == 2
1844 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1845 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1846 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1847 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1848 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1849 && asm_noperands (newpat) < 0)
1850 {
1851 newpat = XVECEXP (newpat, 0, 0);
1852 insn_code_number
1853 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1854 }
1855
1856 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1857 && XVECLEN (newpat, 0) == 2
1858 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1859 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1860 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1861 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1862 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1863 && asm_noperands (newpat) < 0)
1864 {
1865 newpat = XVECEXP (newpat, 0, 1);
1866 insn_code_number
1867 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1868 }
1869
1870 /* If we were combining three insns and the result is a simple SET
1871 (with no ASM_OPERANDS) that wasn't recognized, try to split it into two
1872 insns. There are two ways to do this. It can be split using a
1873 machine-specific method (like when you have an addition of a large
1874 constant) or by combine in the function find_split_point. */
1875
1876 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1877 && asm_noperands (newpat) < 0)
1878 {
1879 rtx m_split, *split;
1880 rtx ni2dest = i2dest;
1881
1882 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1883 use I2DEST as a scratch register will help. In the latter case,
1884 convert I2DEST to the mode of the source of NEWPAT if we can. */
1885
1886 m_split = split_insns (newpat, i3);
1887
1888 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1889 inputs of NEWPAT. */
1890
1891 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1892 possible to try that as a scratch reg. This would require adding
1893 more code to make it work though. */
1894
1895 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1896 {
1897 /* If I2DEST is a hard register or the only use of a pseudo,
1898 we can change its mode. */
1899 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1900 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1901 && GET_CODE (i2dest) == REG
1902 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1764 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1903 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1765 && ! REG_USERVAR_P (i2dest))))
1904 && ! REG_USERVAR_P (i2dest))))
1766 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1905 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
1767 REGNO (i2dest));
1768
1906 REGNO (i2dest));
1907
1769 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1770 gen_rtvec (2, newpat,
1771 gen_rtx (CLOBBER,
1772 VOIDmode,
1773 ni2dest))),
1774 i3);
1908 m_split = split_insns
1909 (gen_rtx_PARALLEL (VOIDmode,
1910 gen_rtvec (2, newpat,
1911 gen_rtx_CLOBBER (VOIDmode,
1912 ni2dest))),
1913 i3);
1775 }
1776
1777 if (m_split && GET_CODE (m_split) == SEQUENCE
1778 && XVECLEN (m_split, 0) == 2
1779 && (next_real_insn (i2) == i3
1780 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1781 INSN_CUID (i2))))
1782 {
1783 rtx i2set, i3set;
1784 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1785 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1786
1787 i3set = single_set (XVECEXP (m_split, 0, 1));
1788 i2set = single_set (XVECEXP (m_split, 0, 0));
1789
1790 /* In case we changed the mode of I2DEST, replace it in the
1791 pseudo-register table here. We can't do it above in case this
1792 code doesn't get executed and we do a split the other way. */
1793
1794 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1795 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1796
1797 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1798 &i2_scratches);
1799
1800 /* If I2 or I3 has multiple SETs, we won't know how to track
1914 }
1915
1916 if (m_split && GET_CODE (m_split) == SEQUENCE
1917 && XVECLEN (m_split, 0) == 2
1918 && (next_real_insn (i2) == i3
1919 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1920 INSN_CUID (i2))))
1921 {
1922 rtx i2set, i3set;
1923 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1924 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1925
1926 i3set = single_set (XVECEXP (m_split, 0, 1));
1927 i2set = single_set (XVECEXP (m_split, 0, 0));
1928
1929 /* In case we changed the mode of I2DEST, replace it in the
1930 pseudo-register table here. We can't do it above in case this
1931 code doesn't get executed and we do a split the other way. */
1932
1933 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1934 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1935
1936 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1937 &i2_scratches);
1938
1939 /* If I2 or I3 has multiple SETs, we won't know how to track
1801 register status, so don't use these insns. */
1940 register status, so don't use these insns. If I2's destination
1941 is used between I2 and I3, we also can't use these insns. */
1802
1942
1803 if (i2_code_number >= 0 && i2set && i3set)
1943 if (i2_code_number >= 0 && i2set && i3set
1944 && (next_real_insn (i2) == i3
1945 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
1804 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1805 &i3_scratches);
1806 if (insn_code_number >= 0)
1807 newpat = newi3pat;
1808
1809 /* It is possible that both insns now set the destination of I3.
1810 If so, we must show an extra use of it. */
1811
1946 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1947 &i3_scratches);
1948 if (insn_code_number >= 0)
1949 newpat = newi3pat;
1950
1951 /* It is possible that both insns now set the destination of I3.
1952 If so, we must show an extra use of it. */
1953
1812 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1813 && GET_CODE (SET_DEST (i2set)) == REG
1814 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1815 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1954 if (insn_code_number >= 0)
1955 {
1956 rtx new_i3_dest = SET_DEST (i3set);
1957 rtx new_i2_dest = SET_DEST (i2set);
1958
1959 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1960 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1961 || GET_CODE (new_i3_dest) == SUBREG)
1962 new_i3_dest = XEXP (new_i3_dest, 0);
1963
1964 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1965 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1966 || GET_CODE (new_i2_dest) == SUBREG)
1967 new_i2_dest = XEXP (new_i2_dest, 0);
1968
1969 if (GET_CODE (new_i3_dest) == REG
1970 && GET_CODE (new_i2_dest) == REG
1971 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
1972 REG_N_SETS (REGNO (new_i2_dest))++;
1973 }
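	      /* E.g. (sketch): an I3SET destination of
		 (strict_low_part (subreg:HI (reg:SI 77) 0)) and an I2SET
		 destination of (reg:SI 77) both strip down to (reg 77),
		 so the extra SET of that register gets counted.  */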
1816 }
1817
1818 /* If we can split it and use I2DEST, go ahead and see if that
1819 helps things be recognized. Verify that none of the registers
1820 are set between I2 and I3. */
1821 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1822#ifdef HAVE_cc0
1823 && GET_CODE (i2dest) == REG
1824#endif
1825 /* We need I2DEST in the proper mode. If it is a hard register
1826 or the only use of a pseudo, we can change its mode. */
1827 && (GET_MODE (*split) == GET_MODE (i2dest)
1828 || GET_MODE (*split) == VOIDmode
1829 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1974 }
1975
1976 /* If we can split it and use I2DEST, go ahead and see if that
1977 helps things be recognized. Verify that none of the registers
1978 are set between I2 and I3. */
1979 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1980#ifdef HAVE_cc0
1981 && GET_CODE (i2dest) == REG
1982#endif
1983 /* We need I2DEST in the proper mode. If it is a hard register
1984 or the only use of a pseudo, we can change its mode. */
1985 && (GET_MODE (*split) == GET_MODE (i2dest)
1986 || GET_MODE (*split) == VOIDmode
1987 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1830 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1988 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
1831 && ! REG_USERVAR_P (i2dest)))
1832 && (next_real_insn (i2) == i3
1833 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1834 /* We can't overwrite I2DEST if its value is still used by
1835 NEWPAT. */
1836 && ! reg_referenced_p (i2dest, newpat))
1837 {
1838 rtx newdest = i2dest;
1839 enum rtx_code split_code = GET_CODE (*split);
1840 enum machine_mode split_mode = GET_MODE (*split);
1841
1842 /* Get NEWDEST as a register in the proper mode. We have already
1843 validated that we can do this. */
1844 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1845 {
1989 && ! REG_USERVAR_P (i2dest)))
1990 && (next_real_insn (i2) == i3
1991 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1992 /* We can't overwrite I2DEST if its value is still used by
1993 NEWPAT. */
1994 && ! reg_referenced_p (i2dest, newpat))
1995 {
1996 rtx newdest = i2dest;
1997 enum rtx_code split_code = GET_CODE (*split);
1998 enum machine_mode split_mode = GET_MODE (*split);
1999
2000 /* Get NEWDEST as a register in the proper mode. We have already
2001 validated that we can do this. */
2002 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
2003 {
1846 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
2004 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
1847
1848 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1849 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1850 }
1851
1852 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1853 an ASHIFT. This can occur if it was inside a PLUS and hence
1854 appeared to be a memory address. This is a kludge. */
1855 if (split_code == MULT
1856 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1857 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1858 {
1859 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1860 XEXP (*split, 0), GEN_INT (i)));
1861 /* Update split_code because we may not have a multiply
1862 anymore. */
1863 split_code = GET_CODE (*split);
1864 }
1865
1866#ifdef INSN_SCHEDULING
1867 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1868 be written as a ZERO_EXTEND. */
1869 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1870 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1871 XEXP (*split, 0)));
1872#endif
1873
1874 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1875 SUBST (*split, newdest);
1876 i2_code_number
1877 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1878
1879 /* If the split point was a MULT and we didn't have one before,
1880 don't use one now. */
1881 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1882 insn_code_number
1883 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1884 }
1885 }
1886
1887 /* Check for a case where we loaded from memory in a narrow mode and
1888 then sign extended it, but we need both registers. In that case,
1889 we have a PARALLEL with both loads from the same memory location.
1890 We can split this into a load from memory followed by a register-register
1891 copy. This saves at least one insn, more if register allocation can
1892 eliminate the copy.
1893
1894 We cannot do this if the destination of the second assignment is
1895 a register that we have already assumed is zero-extended. Similarly
1896 for a SUBREG of such a register. */
1897
1898 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1899 && GET_CODE (newpat) == PARALLEL
1900 && XVECLEN (newpat, 0) == 2
1901 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1902 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1903 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1904 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1905 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1906 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1907 INSN_CUID (i2))
1908 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1909 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1910 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1911 (GET_CODE (temp) == REG
1912 && reg_nonzero_bits[REGNO (temp)] != 0
1913 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1914 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1915 && (reg_nonzero_bits[REGNO (temp)]
1916 != GET_MODE_MASK (word_mode))))
1917 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1918 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1919 (GET_CODE (temp) == REG
1920 && reg_nonzero_bits[REGNO (temp)] != 0
1921 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1922 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1923 && (reg_nonzero_bits[REGNO (temp)]
1924 != GET_MODE_MASK (word_mode)))))
1925 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1926 SET_SRC (XVECEXP (newpat, 0, 1)))
1927 && ! find_reg_note (i3, REG_UNUSED,
1928 SET_DEST (XVECEXP (newpat, 0, 0))))
1929 {
1930 rtx ni2dest;
1931
1932 newi2pat = XVECEXP (newpat, 0, 0);
1933 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1934 newpat = XVECEXP (newpat, 0, 1);
1935 SUBST (SET_SRC (newpat),
1936 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1937 i2_code_number
1938 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1939
1940 if (i2_code_number >= 0)
1941 insn_code_number
1942 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1943
1944 if (insn_code_number >= 0)
1945 {
1946 rtx insn;
1947 rtx link;
1948
1949 /* If we will be able to accept this, we have made a change to the
1950 destination of I3. This can invalidate a LOG_LINKS entry pointing
1951 to I3. No other part of combine.c makes such a transformation.
1952
1953 The new I3 will have a destination that was previously the
1954 destination of I1 or I2 and which was used in I2 or I3. Call
1955 distribute_links to make a LOG_LINK from the next use of
1956 that destination. */
1957
1958 PATTERN (i3) = newpat;
2005
2006 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2007 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2008 }
2009
2010 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2011 an ASHIFT. This can occur if it was inside a PLUS and hence
2012 appeared to be a memory address. This is a kludge. */
2013 if (split_code == MULT
2014 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2015 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
2016 {
2017 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2018 XEXP (*split, 0), GEN_INT (i)));
2019 /* Update split_code because we may not have a multiply
2020 anymore. */
2021 split_code = GET_CODE (*split);
2022 }
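	  /* Example (illustrative): inside an address,
	     (mult:SI (reg:SI 70) (const_int 8)) becomes
	     (ashift:SI (reg:SI 70) (const_int 3)), since
	     exact_log2 (8) == 3, and a shift is more likely to match
	     an insn pattern than a multiply.  */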
2023
2024#ifdef INSN_SCHEDULING
2025 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2026 be written as a ZERO_EXTEND. */
2027 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2028 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
2029 XEXP (*split, 0)));
2030#endif
2031
2032 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2033 SUBST (*split, newdest);
2034 i2_code_number
2035 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2036
2037 /* If the split point was a MULT and we didn't have one before,
2038 don't use one now. */
2039 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
2040 insn_code_number
2041 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2042 }
2043 }
2044
2045 /* Check for a case where we loaded from memory in a narrow mode and
2046 then sign extended it, but we need both registers. In that case,
2047 we have a PARALLEL with both loads from the same memory location.
2048 We can split this into a load from memory followed by a register-register
2049 copy. This saves at least one insn, more if register allocation can
2050 eliminate the copy.
2051
2052 We cannot do this if the destination of the second assignment is
2053 a register that we have already assumed is zero-extended. Similarly
2054 for a SUBREG of such a register. */
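 /* Sketch of the PARALLEL handled by this arm (ADDR is a placeholder):

	(parallel [(set (reg:SI 70) (sign_extend:SI (mem:HI ADDR)))
		   (set (reg:HI 71) (mem:HI ADDR))])

    This is rewritten as the extending load alone, followed by a copy
    of the low part of (reg 70) into (reg 71), so memory is read once.  */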
2055
2056 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2057 && GET_CODE (newpat) == PARALLEL
2058 && XVECLEN (newpat, 0) == 2
2059 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2060 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2061 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2062 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2063 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2064 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2065 INSN_CUID (i2))
2066 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2067 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2068 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2069 (GET_CODE (temp) == REG
2070 && reg_nonzero_bits[REGNO (temp)] != 0
2071 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2072 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2073 && (reg_nonzero_bits[REGNO (temp)]
2074 != GET_MODE_MASK (word_mode))))
2075 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2076 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2077 (GET_CODE (temp) == REG
2078 && reg_nonzero_bits[REGNO (temp)] != 0
2079 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2080 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2081 && (reg_nonzero_bits[REGNO (temp)]
2082 != GET_MODE_MASK (word_mode)))))
2083 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2084 SET_SRC (XVECEXP (newpat, 0, 1)))
2085 && ! find_reg_note (i3, REG_UNUSED,
2086 SET_DEST (XVECEXP (newpat, 0, 0))))
2087 {
2088 rtx ni2dest;
2089
2090 newi2pat = XVECEXP (newpat, 0, 0);
2091 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2092 newpat = XVECEXP (newpat, 0, 1);
2093 SUBST (SET_SRC (newpat),
2094 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
2095 i2_code_number
2096 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2097
2098 if (i2_code_number >= 0)
2099 insn_code_number
2100 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2101
2102 if (insn_code_number >= 0)
2103 {
2104 rtx insn;
2105 rtx link;
2106
2107 /* If we will be able to accept this, we have made a change to the
2108 destination of I3. This can invalidate a LOG_LINKS entry pointing
2109 to I3. No other part of combine.c makes such a transformation.
2110
2111 The new I3 will have a destination that was previously the
2112 destination of I1 or I2 and which was used in I2 or I3. Call
2113 distribute_links to make a LOG_LINK from the next use of
2114 that destination. */
2115
2116 PATTERN (i3) = newpat;
1959 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
2117 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
1960
1961 /* I3 now uses what used to be its destination and which is
1962 now I2's destination. That means we need a LOG_LINK from
1963 I3 to I2. But we used to have one, so we still will.
1964
1965 However, some later insn might be using I2's dest and have
1966 a LOG_LINK pointing at I3. We must remove this link.
1967 The simplest way to remove the link is to point it at I1,
1968 which we know will be a NOTE. */
1969
1970 for (insn = NEXT_INSN (i3);
1971 insn && (this_basic_block == n_basic_blocks - 1
1972 || insn != basic_block_head[this_basic_block + 1]);
1973 insn = NEXT_INSN (insn))
1974 {
1975 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1976 && reg_referenced_p (ni2dest, PATTERN (insn)))
1977 {
1978 for (link = LOG_LINKS (insn); link;
1979 link = XEXP (link, 1))
1980 if (XEXP (link, 0) == i3)
1981 XEXP (link, 0) = i1;
1982
1983 break;
1984 }
1985 }
1986 }
1987 }
1988
1989 /* Similarly, check for a case where we have a PARALLEL of two independent
1990 SETs but we started with three insns. In this case, we can do the sets
1991 as two separate insns. This case occurs when some SET allows two
1992 other insns to combine, but the destination of that SET is still live. */
1993
1994 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1995 && GET_CODE (newpat) == PARALLEL
1996 && XVECLEN (newpat, 0) == 2
1997 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1998 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1999 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2000 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2001 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2002 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2003 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2004 INSN_CUID (i2))
2005 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2006 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2007 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2008 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2009 XVECEXP (newpat, 0, 0))
2010 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2011 XVECEXP (newpat, 0, 1)))
2012 {
2118
2119 /* I3 now uses what used to be its destination and which is
2120 now I2's destination. That means we need a LOG_LINK from
2121 I3 to I2. But we used to have one, so we still will.
2122
2123 However, some later insn might be using I2's dest and have
2124 a LOG_LINK pointing at I3. We must remove this link.
2125 The simplest way to remove the link is to point it at I1,
2126 which we know will be a NOTE. */
2127
2128 for (insn = NEXT_INSN (i3);
2129 insn && (this_basic_block == n_basic_blocks - 1
2130 || insn != basic_block_head[this_basic_block + 1]);
2131 insn = NEXT_INSN (insn))
2132 {
2133 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2134 && reg_referenced_p (ni2dest, PATTERN (insn)))
2135 {
2136 for (link = LOG_LINKS (insn); link;
2137 link = XEXP (link, 1))
2138 if (XEXP (link, 0) == i3)
2139 XEXP (link, 0) = i1;
2140
2141 break;
2142 }
2143 }
2144 }
2145 }
2146
2147 /* Similarly, check for a case where we have a PARALLEL of two independent
2148 SETs but we started with three insns. In this case, we can do the sets
2149 as two separate insns. This case occurs when some SET allows two
2150 other insns to combine, but the destination of that SET is still live. */
2151
2152 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2153 && GET_CODE (newpat) == PARALLEL
2154 && XVECLEN (newpat, 0) == 2
2155 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2156 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2157 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2158 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2159 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2160 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2161 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2162 INSN_CUID (i2))
2163 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2164 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2165 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2166 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2167 XVECEXP (newpat, 0, 0))
2168 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2169 XVECEXP (newpat, 0, 1)))
2170 {
2013 newi2pat = XVECEXP (newpat, 0, 1);
2014 newpat = XVECEXP (newpat, 0, 0);
2171 /* Normally, it doesn't matter which of the two is done first,
2172 but it does if one references cc0. In that case, it has to
2173 be first. */
2174#ifdef HAVE_cc0
2175 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2176 {
2177 newi2pat = XVECEXP (newpat, 0, 0);
2178 newpat = XVECEXP (newpat, 0, 1);
2179 }
2180 else
2181#endif
2182 {
2183 newi2pat = XVECEXP (newpat, 0, 1);
2184 newpat = XVECEXP (newpat, 0, 0);
2185 }
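	 /* Sketch: if XVECEXP (newpat, 0, 0) uses (cc0), e.g. a
	    store-flag set like
		(set (reg:SI 70) (ne:SI (cc0) (const_int 0))),
	    it is made NEWI2PAT so that it is emitted first, at I2's
	    position.  */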
2015
2016 i2_code_number
2017 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2018
2019 if (i2_code_number >= 0)
2020 insn_code_number
2021 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2022 }
2023
2024 /* If it still isn't recognized, fail and change things back the way they
2025 were. */
2026 if ((insn_code_number < 0
2027 /* Is the result a reasonable ASM_OPERANDS? */
2028 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2029 {
2030 undo_all ();
2031 return 0;
2032 }
2033
2034 /* If we had to change another insn, make sure it is valid also. */
2035 if (undobuf.other_insn)
2036 {
2037 rtx other_pat = PATTERN (undobuf.other_insn);
2038 rtx new_other_notes;
2039 rtx note, next;
2040
2041 CLEAR_HARD_REG_SET (newpat_used_regs);
2042
2043 other_code_number
2044 = recog_for_combine (&other_pat, undobuf.other_insn,
2045 &new_other_notes, &other_scratches);
2046
2047 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2048 {
2049 undo_all ();
2050 return 0;
2051 }
2052
2053 PATTERN (undobuf.other_insn) = other_pat;
2054
2055 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2056 are still valid. Then add any non-duplicate notes added by
2057 recog_for_combine. */
2058 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2059 {
2060 next = XEXP (note, 1);
2061
2062 if (REG_NOTE_KIND (note) == REG_UNUSED
2063 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2064 {
2065 if (GET_CODE (XEXP (note, 0)) == REG)
2186
2187 i2_code_number
2188 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2189
2190 if (i2_code_number >= 0)
2191 insn_code_number
2192 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2193 }
2194
2195 /* If it still isn't recognized, fail and change things back the way they
2196 were. */
2197 if ((insn_code_number < 0
2198 /* Is the result a reasonable ASM_OPERANDS? */
2199 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2200 {
2201 undo_all ();
2202 return 0;
2203 }
2204
2205 /* If we had to change another insn, make sure it is valid also. */
2206 if (undobuf.other_insn)
2207 {
2208 rtx other_pat = PATTERN (undobuf.other_insn);
2209 rtx new_other_notes;
2210 rtx note, next;
2211
2212 CLEAR_HARD_REG_SET (newpat_used_regs);
2213
2214 other_code_number
2215 = recog_for_combine (&other_pat, undobuf.other_insn,
2216 &new_other_notes, &other_scratches);
2217
2218 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2219 {
2220 undo_all ();
2221 return 0;
2222 }
2223
2224 PATTERN (undobuf.other_insn) = other_pat;
2225
2226 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2227 are still valid. Then add any non-duplicate notes added by
2228 recog_for_combine. */
2229 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2230 {
2231 next = XEXP (note, 1);
2232
2233 if (REG_NOTE_KIND (note) == REG_UNUSED
2234 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2235 {
2236 if (GET_CODE (XEXP (note, 0)) == REG)
2066 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2237 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
2067
2068 remove_note (undobuf.other_insn, note);
2069 }
2070 }
2071
2072 for (note = new_other_notes; note; note = XEXP (note, 1))
2073 if (GET_CODE (XEXP (note, 0)) == REG)
2238
2239 remove_note (undobuf.other_insn, note);
2240 }
2241 }
2242
2243 for (note = new_other_notes; note; note = XEXP (note, 1))
2244 if (GET_CODE (XEXP (note, 0)) == REG)
2074 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2245 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
2075
2076 distribute_notes (new_other_notes, undobuf.other_insn,
2077 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2078 }
2079
2080 /* We now know that we can do this combination. Merge the insns and
2081 update the status of registers and LOG_LINKS. */
2082
2083 {
2084 rtx i3notes, i2notes, i1notes = 0;
2085 rtx i3links, i2links, i1links = 0;
2086 rtx midnotes = 0;
2087 register int regno;
2246
2247 distribute_notes (new_other_notes, undobuf.other_insn,
2248 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2249 }
2250
2251 /* We now know that we can do this combination. Merge the insns and
2252 update the status of registers and LOG_LINKS. */
2253
2254 {
2255 rtx i3notes, i2notes, i1notes = 0;
2256 rtx i3links, i2links, i1links = 0;
2257 rtx midnotes = 0;
2258 register int regno;
2088 /* Compute which registers we expect to eliminate. */
2089 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2259 /* Compute which registers we expect to eliminate. newi2pat may be setting
2260 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2261 same as i3dest, in which case newi2pat may be setting i1dest. */
2262 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2263 || i2dest_in_i2src || i2dest_in_i1src
2090 ? 0 : i2dest);
2264 ? 0 : i2dest);
2091 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2265 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2266 || (newi2pat && reg_set_p (i1dest, newi2pat))
2267 ? 0 : i1dest);
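    /* Concrete case (sketch): after a split, NEWI2PAT may itself still
       set I2DEST; then reg_set_p (i2dest, newi2pat) holds, ELIM_I2 is
       left zero, and distribute_notes below will keep rather than
       discard that register's death notes.  */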
2092
2093 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2094 clear them. */
2095 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2096 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2097 if (i1)
2098 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2099
2100 /* Ensure that we do not have something that should not be shared but
2101 occurs multiple times in the new insns. Check this by first
2102 resetting all the `used' flags and then copying anything that is shared. */
2103
2104 reset_used_flags (i3notes);
2105 reset_used_flags (i2notes);
2106 reset_used_flags (i1notes);
2107 reset_used_flags (newpat);
2108 reset_used_flags (newi2pat);
2109 if (undobuf.other_insn)
2110 reset_used_flags (PATTERN (undobuf.other_insn));
2111
2112 i3notes = copy_rtx_if_shared (i3notes);
2113 i2notes = copy_rtx_if_shared (i2notes);
2114 i1notes = copy_rtx_if_shared (i1notes);
2115 newpat = copy_rtx_if_shared (newpat);
2116 newi2pat = copy_rtx_if_shared (newi2pat);
2117 if (undobuf.other_insn)
2118 reset_used_flags (PATTERN (undobuf.other_insn));
2119
2120 INSN_CODE (i3) = insn_code_number;
2121 PATTERN (i3) = newpat;
2122 if (undobuf.other_insn)
2123 INSN_CODE (undobuf.other_insn) = other_code_number;
2124
2125 /* We had one special case above where I2 had more than one set and
2126 we replaced a destination of one of those sets with the destination
2127 of I3. In that case, we have to update LOG_LINKS of insns later
2128 in this basic block. Note that this (expensive) case is rare.
2129
2130 Also, in this case, we must pretend that all REG_NOTEs for I2
2131 actually came from I3, so that REG_UNUSED notes from I2 will be
2132 properly handled. */
2133
2134 if (i3_subst_into_i2)
2135 {
2136 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2137 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2138 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2139 && ! find_reg_note (i2, REG_UNUSED,
2140 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2141 for (temp = NEXT_INSN (i2);
2142 temp && (this_basic_block == n_basic_blocks - 1
2143 || basic_block_head[this_basic_block] != temp);
2144 temp = NEXT_INSN (temp))
2145 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2146 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2147 if (XEXP (link, 0) == i2)
2148 XEXP (link, 0) = i3;
2149
2150 if (i3notes)
2151 {
2152 rtx link = i3notes;
2153 while (XEXP (link, 1))
2154 link = XEXP (link, 1);
2155 XEXP (link, 1) = i2notes;
2156 }
2157 else
2158 i3notes = i2notes;
2159 i2notes = 0;
2160 }
2161
2162 LOG_LINKS (i3) = 0;
2163 REG_NOTES (i3) = 0;
2164 LOG_LINKS (i2) = 0;
2165 REG_NOTES (i2) = 0;
2166
2167 if (newi2pat)
2168 {
2169 INSN_CODE (i2) = i2_code_number;
2170 PATTERN (i2) = newi2pat;
2171 }
2172 else
2173 {
2174 PUT_CODE (i2, NOTE);
2175 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2176 NOTE_SOURCE_FILE (i2) = 0;
2177 }
2178
2179 if (i1)
2180 {
2181 LOG_LINKS (i1) = 0;
2182 REG_NOTES (i1) = 0;
2183 PUT_CODE (i1, NOTE);
2184 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2185 NOTE_SOURCE_FILE (i1) = 0;
2186 }
2187
2188 /* Get death notes for everything that is now used in either I3 or
2268
2269 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2270 clear them. */
2271 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2272 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2273 if (i1)
2274 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2275
2276 /* Ensure that we do not have something that should not be shared but
2277 occurs multiple times in the new insns. Check this by first
2278 resetting all the `used' flags and then copying anything that is shared. */
2279
2280 reset_used_flags (i3notes);
2281 reset_used_flags (i2notes);
2282 reset_used_flags (i1notes);
2283 reset_used_flags (newpat);
2284 reset_used_flags (newi2pat);
2285 if (undobuf.other_insn)
2286 reset_used_flags (PATTERN (undobuf.other_insn));
2287
2288 i3notes = copy_rtx_if_shared (i3notes);
2289 i2notes = copy_rtx_if_shared (i2notes);
2290 i1notes = copy_rtx_if_shared (i1notes);
2291 newpat = copy_rtx_if_shared (newpat);
2292 newi2pat = copy_rtx_if_shared (newi2pat);
2293 if (undobuf.other_insn)
2294 reset_used_flags (PATTERN (undobuf.other_insn));
2295
2296 INSN_CODE (i3) = insn_code_number;
2297 PATTERN (i3) = newpat;
2298 if (undobuf.other_insn)
2299 INSN_CODE (undobuf.other_insn) = other_code_number;
2300
2301 /* We had one special case above where I2 had more than one set and
2302 we replaced a destination of one of those sets with the destination
2303 of I3. In that case, we have to update LOG_LINKS of insns later
2304 in this basic block. Note that this (expensive) case is rare.
2305
2306 Also, in this case, we must pretend that all REG_NOTEs for I2
2307 actually came from I3, so that REG_UNUSED notes from I2 will be
2308 properly handled. */
2309
2310 if (i3_subst_into_i2)
2311 {
2312 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2313 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2314 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2315 && ! find_reg_note (i2, REG_UNUSED,
2316 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2317 for (temp = NEXT_INSN (i2);
2318 temp && (this_basic_block == n_basic_blocks - 1
2319 || basic_block_head[this_basic_block] != temp);
2320 temp = NEXT_INSN (temp))
2321 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2322 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2323 if (XEXP (link, 0) == i2)
2324 XEXP (link, 0) = i3;
2325
2326 if (i3notes)
2327 {
2328 rtx link = i3notes;
2329 while (XEXP (link, 1))
2330 link = XEXP (link, 1);
2331 XEXP (link, 1) = i2notes;
2332 }
2333 else
2334 i3notes = i2notes;
2335 i2notes = 0;
2336 }
2337
2338 LOG_LINKS (i3) = 0;
2339 REG_NOTES (i3) = 0;
2340 LOG_LINKS (i2) = 0;
2341 REG_NOTES (i2) = 0;
2342
2343 if (newi2pat)
2344 {
2345 INSN_CODE (i2) = i2_code_number;
2346 PATTERN (i2) = newi2pat;
2347 }
2348 else
2349 {
2350 PUT_CODE (i2, NOTE);
2351 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2352 NOTE_SOURCE_FILE (i2) = 0;
2353 }
2354
2355 if (i1)
2356 {
2357 LOG_LINKS (i1) = 0;
2358 REG_NOTES (i1) = 0;
2359 PUT_CODE (i1, NOTE);
2360 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2361 NOTE_SOURCE_FILE (i1) = 0;
2362 }
2363
2364 /* Get death notes for everything that is now used in either I3 or
2189 I2 and used to die in a previous insn. */
2365 I2 and used to die in a previous insn. If we built two new
2366 patterns, move from I1 to I2 then I2 to I3 so that we get the
2367 proper movement on registers that I2 modifies. */
2190
2368
2191 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2192 if (newi2pat)
2369 if (newi2pat)
2193 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2370 {
2371 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2372 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2373 }
2374 else
2375 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2376 i3, &midnotes);
2194
2195 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2196 if (i3notes)
2197 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2198 elim_i2, elim_i1);
2199 if (i2notes)
2200 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2201 elim_i2, elim_i1);
2202 if (i1notes)
2203 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2204 elim_i2, elim_i1);
2205 if (midnotes)
2206 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2207 elim_i2, elim_i1);
2208
2209 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2210 know these are REG_UNUSED and want them to go to the desired insn,
2211 so we always pass it as i3. We have not counted the notes in
2212 reg_n_deaths yet, so we need to do so now. */
2213
2214 if (newi2pat && new_i2_notes)
2215 {
2216 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2217 if (GET_CODE (XEXP (temp, 0)) == REG)
2377
2378 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2379 if (i3notes)
2380 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2381 elim_i2, elim_i1);
2382 if (i2notes)
2383 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2384 elim_i2, elim_i1);
2385 if (i1notes)
2386 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2387 elim_i2, elim_i1);
2388 if (midnotes)
2389 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2390 elim_i2, elim_i1);
2391
2392 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2393 know these are REG_UNUSED and want them to go to the desired insn,
2394 so we always pass it as i3. We have not counted the notes in
2395 reg_n_deaths yet, so we need to do so now. */
2396
2397 if (newi2pat && new_i2_notes)
2398 {
2399 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2400 if (GET_CODE (XEXP (temp, 0)) == REG)
2218 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2401 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2219
2220 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2221 }
2222
2223 if (new_i3_notes)
2224 {
2225 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2226 if (GET_CODE (XEXP (temp, 0)) == REG)
2402
2403 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2404 }
2405
2406 if (new_i3_notes)
2407 {
2408 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2409 if (GET_CODE (XEXP (temp, 0)) == REG)
2227 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2410 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
2228
2229 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2230 }
2231
2232 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2411
2412 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2413 }
2414
2415 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2233 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2416 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2417 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2418 in that case, it might delete I2. Similarly for I2 and I1.
2234 Show an additional death due to the REG_DEAD note we make here. If
2235 we discard it in distribute_notes, we will decrement it again. */
2236
2237 if (i3dest_killed)
2238 {
2239 if (GET_CODE (i3dest_killed) == REG)
2419 Show an additional death due to the REG_DEAD note we make here. If
2420 we discard it in distribute_notes, we will decrement it again. */
2421
2422 if (i3dest_killed)
2423 {
2424 if (GET_CODE (i3dest_killed) == REG)
2240 reg_n_deaths[REGNO (i3dest_killed)]++;
2425 REG_N_DEATHS (REGNO (i3dest_killed))++;
2241
2426
2242 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2243 NULL_RTX),
2244 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2245 NULL_RTX, NULL_RTX);
2427 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
2428 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2429 NULL_RTX),
2430 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
2431 else
2432 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2433 NULL_RTX),
2434 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2435 elim_i2, elim_i1);
2246 }
2247
2436 }
2437
2248 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2249 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2250 we passed I3 in that case, it might delete I2. */
2251
2252 if (i2dest_in_i2src)
2253 {
2254 if (GET_CODE (i2dest) == REG)
2438 if (i2dest_in_i2src)
2439 {
2440 if (GET_CODE (i2dest) == REG)
2255 reg_n_deaths[REGNO (i2dest)]++;
2441 REG_N_DEATHS (REGNO (i2dest))++;
2256
2257 if (newi2pat && reg_set_p (i2dest, newi2pat))
2442
2443 if (newi2pat && reg_set_p (i2dest, newi2pat))
2258 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2444 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2259 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2260 else
2445 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2446 else
2261 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2447 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
2262 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2263 NULL_RTX, NULL_RTX);
2264 }
2265
2266 if (i1dest_in_i1src)
2267 {
2268 if (GET_CODE (i1dest) == REG)
2448 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2449 NULL_RTX, NULL_RTX);
2450 }
2451
2452 if (i1dest_in_i1src)
2453 {
2454 if (GET_CODE (i1dest) == REG)
2269 reg_n_deaths[REGNO (i1dest)]++;
2455 REG_N_DEATHS (REGNO (i1dest))++;
2270
2271 if (newi2pat && reg_set_p (i1dest, newi2pat))
2456
2457 if (newi2pat && reg_set_p (i1dest, newi2pat))
2272 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2458 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2273 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2274 else
2459 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2460 else
2275 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2461 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
2276 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2277 NULL_RTX, NULL_RTX);
2278 }
2279
2280 distribute_links (i3links);
2281 distribute_links (i2links);
2282 distribute_links (i1links);
2283
2284 if (GET_CODE (i2dest) == REG)
2285 {
2286 rtx link;
2287 rtx i2_insn = 0, i2_val = 0, set;
2288
2289 /* The insn that used to set this register doesn't exist, and
2290 this life of the register may not exist either. See if one of
2291 I3's links points to an insn that sets I2DEST. If it does,
2292 that is now the last known value for I2DEST. If we don't update
2293 this and I2 set the register to a value that depended on its old
2294 contents, we will get confused. If this insn is used, things
2295 will be set correctly in combine_instructions. */
2296
2297 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2298 if ((set = single_set (XEXP (link, 0))) != 0
2299 && rtx_equal_p (i2dest, SET_DEST (set)))
2300 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2301
2302 record_value_for_reg (i2dest, i2_insn, i2_val);
2303
2304 /* If the reg formerly set in I2 died only once and that was in I3,
2305 zero its use count so it won't make `reload' do any work. */
2462 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2463 NULL_RTX, NULL_RTX);
2464 }
2465
2466 distribute_links (i3links);
2467 distribute_links (i2links);
2468 distribute_links (i1links);
2469
2470 if (GET_CODE (i2dest) == REG)
2471 {
2472 rtx link;
2473 rtx i2_insn = 0, i2_val = 0, set;
2474
2475 /* The insn that used to set this register doesn't exist, and
2476 this life of the register may not exist either. See if one of
2477 I3's links points to an insn that sets I2DEST. If it does,
2478 that is now the last known value for I2DEST. If we don't update
2479 this and I2 set the register to a value that depended on its old
2480 contents, we will get confused. If this insn is used, things
2481 will be set correctly in combine_instructions. */
2482
2483 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2484 if ((set = single_set (XEXP (link, 0))) != 0
2485 && rtx_equal_p (i2dest, SET_DEST (set)))
2486 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2487
2488 record_value_for_reg (i2dest, i2_insn, i2_val);
2489
2490 /* If the reg formerly set in I2 died only once and that was in I3,
2491 zero its use count so it won't make `reload' do any work. */
2306 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2492 if (! added_sets_2
2493 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2494 && ! i2dest_in_i2src)
2307 {
2308 regno = REGNO (i2dest);
2495 {
2496 regno = REGNO (i2dest);
2309 reg_n_sets[regno]--;
2310 if (reg_n_sets[regno] == 0
2311 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2312 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2313 reg_n_refs[regno] = 0;
2497 REG_N_SETS (regno)--;
2498 if (REG_N_SETS (regno) == 0
2499 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2500 REG_N_REFS (regno) = 0;
2314 }
2315 }
2316
2317 if (i1 && GET_CODE (i1dest) == REG)
2318 {
2319 rtx link;
2320 rtx i1_insn = 0, i1_val = 0, set;
2321
2322 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2323 if ((set = single_set (XEXP (link, 0))) != 0
2324 && rtx_equal_p (i1dest, SET_DEST (set)))
2325 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2326
2327 record_value_for_reg (i1dest, i1_insn, i1_val);
2328
2329 regno = REGNO (i1dest);
2330 if (! added_sets_1 && ! i1dest_in_i1src)
2331 {
2501 }
2502 }
2503
2504 if (i1 && GET_CODE (i1dest) == REG)
2505 {
2506 rtx link;
2507 rtx i1_insn = 0, i1_val = 0, set;
2508
2509 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2510 if ((set = single_set (XEXP (link, 0))) != 0
2511 && rtx_equal_p (i1dest, SET_DEST (set)))
2512 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2513
2514 record_value_for_reg (i1dest, i1_insn, i1_val);
2515
2516 regno = REGNO (i1dest);
2517 if (! added_sets_1 && ! i1dest_in_i1src)
2518 {
2332 reg_n_sets[regno]--;
2333 if (reg_n_sets[regno] == 0
2334 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2335 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2336 reg_n_refs[regno] = 0;
2519 REG_N_SETS (regno)--;
2520 if (REG_N_SETS (regno) == 0
2521 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2522 REG_N_REFS (regno) = 0;
2337 }
2338 }
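/* Illustrative sketch of the accessor change above: REGNO_REG_SET_P
   performs the same word-and-bit test the old code spelled out by
   hand.  A stand-alone version, assuming a regset is an array of
   REGSET_ELT_TYPE words holding REGSET_ELT_BITS bits each: */

static int
regno_bit_set_p (set, regno)
     REGSET_ELT_TYPE *set;
     int regno;
{
  return (set[regno / REGSET_ELT_BITS]
          & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))) != 0;
}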
2339
2340 /* Update reg_nonzero_bits et al for any changes that may have been made
2341 to this insn. */
2342
2343 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2344 if (newi2pat)
2345 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2346
2347 /* If we added any (clobber (scratch)), add them to the max for a
2348 block. This is a very pessimistic calculation, since we might
2349 have had them already and this might not be the worst block, but
2350 it's not worth doing any better. */
2351 max_scratch += i3_scratches + i2_scratches + other_scratches;
2352
2353 /* If I3 is now an unconditional jump, ensure that it has a
2354 BARRIER following it since it may have initially been a
2355 conditional jump. It may also be the last nonnote insn. */
2356
2357 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2358 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2359 || GET_CODE (temp) != BARRIER))
2360 emit_barrier_after (i3);
2361 }
2362
2363 combine_successes++;
2364
2365 /* Clear this here, so that subsequent get_last_value calls are not
2366 affected. */
2367 subst_prev_insn = NULL_RTX;
2368
2369 if (added_links_insn
2370 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2371 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2372 return added_links_insn;
2373 else
2374 return newi2pat ? i2 : i3;
2375}
2376
2377/* Undo all the modifications recorded in undobuf. */
2378
2379static void
2380undo_all ()
2381{
2523 }
2524 }
2525
2526 /* Update reg_nonzero_bits et al for any changes that may have been made
2527 to this insn. */
2528
2529 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2530 if (newi2pat)
2531 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2532
2533 /* If we added any (clobber (scratch)), add them to the max for a
2534 block. This is a very pessimistic calculation, since we might
2535 have had them already and this might not be the worst block, but
2536 it's not worth doing any better. */
2537 max_scratch += i3_scratches + i2_scratches + other_scratches;
2538
2539 /* If I3 is now an unconditional jump, ensure that it has a
2540 BARRIER following it since it may have initially been a
2541 conditional jump. It may also be the last nonnote insn. */
2542
2543 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2544 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2545 || GET_CODE (temp) != BARRIER))
2546 emit_barrier_after (i3);
2547 }
2548
2549 combine_successes++;
2550
2551 /* Clear this here, so that subsequent get_last_value calls are not
2552 affected. */
2553 subst_prev_insn = NULL_RTX;
2554
2555 if (added_links_insn
2556 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2557 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2558 return added_links_insn;
2559 else
2560 return newi2pat ? i2 : i3;
2561}
2562
2563/* Undo all the modifications recorded in undobuf. */
2564
2565static void
2566undo_all ()
2567{
2382 register int i;
2383 if (undobuf.num_undo > MAX_UNDO)
2384 undobuf.num_undo = MAX_UNDO;
2385 for (i = undobuf.num_undo - 1; i >= 0; i--)
2568 struct undo *undo, *next;
2569
2570 for (undo = undobuf.undos; undo; undo = next)
2386 {
2571 {
2387 if (undobuf.undo[i].is_int)
2388 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2572 next = undo->next;
2573 if (undo->is_int)
2574 *undo->where.i = undo->old_contents.i;
2389 else
2575 else
2390 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2391
2576 *undo->where.r = undo->old_contents.r;
2577
2578 undo->next = undobuf.frees;
2579 undobuf.frees = undo;
2392 }
2393
2394 obfree (undobuf.storage);
2580 }
2581
2582 obfree (undobuf.storage);
2395 undobuf.num_undo = 0;
2583 undobuf.undos = undobuf.previous_undos = 0;
2396
2397 /* Clear this here, so that subsequent get_last_value calls are not
2398 affected. */
2399 subst_prev_insn = NULL_RTX;
2400}
2401
2402/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2403 where we have an arithmetic expression and return that point. LOC will
2404 be inside INSN.
2405
2406 try_combine will call this function to see if an insn can be split into
2407 two insns. */
2408
2409static rtx *
2410find_split_point (loc, insn)
2411 rtx *loc;
2412 rtx insn;
2413{
2414 rtx x = *loc;
2415 enum rtx_code code = GET_CODE (x);
2416 rtx *split;
2417 int len = 0, pos, unsignedp;
2418 rtx inner;
2419
2420 /* First special-case some codes. */
2421 switch (code)
2422 {
2423 case SUBREG:
2424#ifdef INSN_SCHEDULING
2425 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2426 point. */
2427 if (GET_CODE (SUBREG_REG (x)) == MEM)
2428 return loc;
2429#endif
2430 return find_split_point (&SUBREG_REG (x), insn);
2431
2432 case MEM:
2433#ifdef HAVE_lo_sum
2434 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2435 using LO_SUM and HIGH. */
2436 if (GET_CODE (XEXP (x, 0)) == CONST
2437 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2438 {
2439 SUBST (XEXP (x, 0),
2440 gen_rtx_combine (LO_SUM, Pmode,
2441 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2442 XEXP (x, 0)));
2443 return &XEXP (XEXP (x, 0), 0);
2444 }
2445#endif
2446
2447 /* If we have a PLUS whose second operand is a constant and the
2448 address is not valid, perhaps we can split it up using
2449 the machine-specific way to split large constants. We use
2450 the first pseudo-reg (one of the virtual regs) as a placeholder;
2451 it will not remain in the result. */
2452 if (GET_CODE (XEXP (x, 0)) == PLUS
2453 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2454 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2455 {
2456 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2584
2585 /* Clear this here, so that subsequent get_last_value calls are not
2586 affected. */
2587 subst_prev_insn = NULL_RTX;
2588}
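/* Sketch of the list-recycling idiom behind the new undo_all: walk
   the live undo list, saving each `next' pointer before pushing the
   node onto a free list for reuse; this replaces the old fixed
   MAX_UNDO array.  Structure and names below are illustrative only. */

struct undo_sketch { struct undo_sketch *next; };

static void
recycle_undo_list (live, frees)
     struct undo_sketch **live;
     struct undo_sketch **frees;
{
  struct undo_sketch *u, *next;

  for (u = *live; u; u = next)
    {
      next = u->next;		/* save before relinking U */
      u->next = *frees;		/* push U onto the free list */
      *frees = u;
    }
  *live = 0;
}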
2589
2590/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2591 where we have an arithmetic expression and return that point. LOC will
2592 be inside INSN.
2593
2594 try_combine will call this function to see if an insn can be split into
2595 two insns. */
2596
2597static rtx *
2598find_split_point (loc, insn)
2599 rtx *loc;
2600 rtx insn;
2601{
2602 rtx x = *loc;
2603 enum rtx_code code = GET_CODE (x);
2604 rtx *split;
2605 int len = 0, pos, unsignedp;
2606 rtx inner;
2607
2608 /* First special-case some codes. */
2609 switch (code)
2610 {
2611 case SUBREG:
2612#ifdef INSN_SCHEDULING
2613 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2614 point. */
2615 if (GET_CODE (SUBREG_REG (x)) == MEM)
2616 return loc;
2617#endif
2618 return find_split_point (&SUBREG_REG (x), insn);
2619
2620 case MEM:
2621#ifdef HAVE_lo_sum
2622 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2623 using LO_SUM and HIGH. */
2624 if (GET_CODE (XEXP (x, 0)) == CONST
2625 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2626 {
2627 SUBST (XEXP (x, 0),
2628 gen_rtx_combine (LO_SUM, Pmode,
2629 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2630 XEXP (x, 0)));
2631 return &XEXP (XEXP (x, 0), 0);
2632 }
2633#endif
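/* Illustration: on a target with HIGH/LO_SUM addressing, a symbolic
   address is normally built in two steps, roughly
       (set R (high:P (symbol_ref "x")))        -- upper bits
       (mem (lo_sum:P R (symbol_ref "x")))      -- add the low bits
   so rewriting the address as (lo_sum (high X) X) exposes the HIGH
   part as the natural split point.  */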
2634
2635 /* If we have a PLUS whose second operand is a constant and the
2636 address is not valid, perhaps we can split it up using
2637 the machine-specific way to split large constants. We use
2638 the first pseudo-reg (one of the virtual regs) as a placeholder;
2639 it will not remain in the result. */
2640 if (GET_CODE (XEXP (x, 0)) == PLUS
2641 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2642 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2643 {
2644 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2457 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2645 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
2458 subst_insn);
2459
2460 /* This should have produced two insns, each of which sets our
2461 placeholder. If the source of the second is a valid address,
2462 we can put both sources together and make a split point
2463 in the middle. */
2464
2465 if (seq && XVECLEN (seq, 0) == 2
2466 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2467 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2468 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2469 && ! reg_mentioned_p (reg,
2470 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2471 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2472 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2473 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2474 && memory_address_p (GET_MODE (x),
2475 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2476 {
2477 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2478 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2479
2480 /* Replace the placeholder in SRC2 with SRC1. If we can
2481 find where in SRC2 it was placed, that can become our
2482 split point and we can replace this address with SRC2.
2483 Just try two obvious places. */
2484
2485 src2 = replace_rtx (src2, reg, src1);
2486 split = 0;
2487 if (XEXP (src2, 0) == src1)
2488 split = &XEXP (src2, 0);
2489 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2490 && XEXP (XEXP (src2, 0), 0) == src1)
2491 split = &XEXP (XEXP (src2, 0), 0);
2492
2493 if (split)
2494 {
2495 SUBST (XEXP (x, 0), src2);
2496 return split;
2497 }
2498 }
2499
2500 /* If that didn't work, perhaps the first operand is complex and
2501 needs to be computed separately, so make a split point there.
2502 This will occur on machines that just support REG + CONST
2503 and have a constant moved through some previous computation. */
2504
2505 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2506 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2507 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2508 == 'o')))
2509 return &XEXP (XEXP (x, 0), 0);
2510 }
2511 break;
2512
2513 case SET:
2514#ifdef HAVE_cc0
2515 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2516 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2517 we need to put the operand into a register. So split at that
2518 point. */
2519
2520 if (SET_DEST (x) == cc0_rtx
2521 && GET_CODE (SET_SRC (x)) != COMPARE
2522 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2523 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2524 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2525 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2526 return &SET_SRC (x);
2527#endif
2528
2529 /* See if we can split SET_SRC as it stands. */
2530 split = find_split_point (&SET_SRC (x), insn);
2531 if (split && split != &SET_SRC (x))
2532 return split;
2533
2646 subst_insn);
2647
2648 /* This should have produced two insns, each of which sets our
2649 placeholder. If the source of the second is a valid address,
2650 we can put both sources together and make a split point
2651 in the middle. */
2652
2653 if (seq && XVECLEN (seq, 0) == 2
2654 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2655 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2656 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2657 && ! reg_mentioned_p (reg,
2658 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2659 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2660 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2661 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2662 && memory_address_p (GET_MODE (x),
2663 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2664 {
2665 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2666 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2667
2668 /* Replace the placeholder in SRC2 with SRC1. If we can
2669 find where in SRC2 it was placed, that can become our
2670 split point and we can replace this address with SRC2.
2671 Just try two obvious places. */
2672
2673 src2 = replace_rtx (src2, reg, src1);
2674 split = 0;
2675 if (XEXP (src2, 0) == src1)
2676 split = &XEXP (src2, 0);
2677 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2678 && XEXP (XEXP (src2, 0), 0) == src1)
2679 split = &XEXP (XEXP (src2, 0), 0);
2680
2681 if (split)
2682 {
2683 SUBST (XEXP (x, 0), src2);
2684 return split;
2685 }
2686 }
2687
2688 /* If that didn't work, perhaps the first operand is complex and
2689 needs to be computed separately, so make a split point there.
2690 This will occur on machines that just support REG + CONST
2691 and have a constant moved through some previous computation. */
2692
2693 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2694 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2695 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2696 == 'o')))
2697 return &XEXP (XEXP (x, 0), 0);
2698 }
2699 break;
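/* Illustration (constants arbitrary): on a machine whose addresses
   allow only REG plus a small offset, the target splitter may divide
       (set TMP (plus (reg R) (const_int 0x12345)))
   into
       (set TMP (plus (reg R) (const_int 0x12000)))
       (set TMP (plus TMP (const_int 0x345)))
   and substituting the first source into the second yields an address
   whose inner PLUS becomes the split point found above.  */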
2700
2701 case SET:
2702#ifdef HAVE_cc0
2703 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2704 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2705 we need to put the operand into a register. So split at that
2706 point. */
2707
2708 if (SET_DEST (x) == cc0_rtx
2709 && GET_CODE (SET_SRC (x)) != COMPARE
2710 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2711 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2712 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2713 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2714 return &SET_SRC (x);
2715#endif
2716
2717 /* See if we can split SET_SRC as it stands. */
2718 split = find_split_point (&SET_SRC (x), insn);
2719 if (split && split != &SET_SRC (x))
2720 return split;
2721
2722 /* See if we can split SET_DEST as it stands. */
2723 split = find_split_point (&SET_DEST (x), insn);
2724 if (split && split != &SET_DEST (x))
2725 return split;
2726
2534 /* See if this is a bitfield assignment with everything constant. If
2535 so, this is an IOR of an AND, so split it into that. */
2536 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2537 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2538 <= HOST_BITS_PER_WIDE_INT)
2539 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2540 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2541 && GET_CODE (SET_SRC (x)) == CONST_INT
2542 && ((INTVAL (XEXP (SET_DEST (x), 1))
2543 + INTVAL (XEXP (SET_DEST (x), 2)))
2544 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2545 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2546 {
2547 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2548 int len = INTVAL (XEXP (SET_DEST (x), 1));
2549 int src = INTVAL (SET_SRC (x));
2550 rtx dest = XEXP (SET_DEST (x), 0);
2551 enum machine_mode mode = GET_MODE (dest);
2552 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2553
2554 if (BITS_BIG_ENDIAN)
2555 pos = GET_MODE_BITSIZE (mode) - len - pos;
2556
2557 if (src == mask)
2558 SUBST (SET_SRC (x),
2559 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2560 else
2561 SUBST (SET_SRC (x),
2562 gen_binary (IOR, mode,
2563 gen_binary (AND, mode, dest,
2564 GEN_INT (~ (mask << pos)
2565 & GET_MODE_MASK (mode))),
2566 GEN_INT (src << pos)));
2567
2568 SUBST (SET_DEST (x), dest);
2569
2570 split = find_split_point (&SET_SRC (x), insn);
2571 if (split && split != &SET_SRC (x))
2572 return split;
2573 }
2574
2575 /* Otherwise, see if this is an operation that we can split into two.
2576 If so, try to split that. */
2577 code = GET_CODE (SET_SRC (x));
2578
2579 switch (code)
2580 {
2581 case AND:
2582 /* If we are AND'ing with a large constant that is only a single
2583 bit and the result is only being used in a context where we
2584 need to know if it is zero or non-zero, replace it with a bit
2585 extraction. This will avoid the large constant, which might
2586 have taken more than one insn to make. If the constant were
2587 not a valid argument to the AND but took only one insn to make,
2588 this is no worse, but if it took more than one insn, it will
2589 be better. */
2590
2591 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2592 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2593 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2594 && GET_CODE (SET_DEST (x)) == REG
2595 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2596 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2597 && XEXP (*split, 0) == SET_DEST (x)
2598 && XEXP (*split, 1) == const0_rtx)
2599 {
2727 /* See if this is a bitfield assignment with everything constant. If
2728 so, this is an IOR of an AND, so split it into that. */
2729 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2730 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2731 <= HOST_BITS_PER_WIDE_INT)
2732 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2733 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2734 && GET_CODE (SET_SRC (x)) == CONST_INT
2735 && ((INTVAL (XEXP (SET_DEST (x), 1))
2736 + INTVAL (XEXP (SET_DEST (x), 2)))
2737 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2738 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2739 {
2740 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2741 int len = INTVAL (XEXP (SET_DEST (x), 1));
2742 int src = INTVAL (SET_SRC (x));
2743 rtx dest = XEXP (SET_DEST (x), 0);
2744 enum machine_mode mode = GET_MODE (dest);
2745 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2746
2747 if (BITS_BIG_ENDIAN)
2748 pos = GET_MODE_BITSIZE (mode) - len - pos;
2749
2750 if (src == mask)
2751 SUBST (SET_SRC (x),
2752 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2753 else
2754 SUBST (SET_SRC (x),
2755 gen_binary (IOR, mode,
2756 gen_binary (AND, mode, dest,
2757 GEN_INT (~ (mask << pos)
2758 & GET_MODE_MASK (mode))),
2759 GEN_INT (src << pos)));
2760
2761 SUBST (SET_DEST (x), dest);
2762
2763 split = find_split_point (&SET_SRC (x), insn);
2764 if (split && split != &SET_SRC (x))
2765 return split;
2766 }
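/* Sketch of the arithmetic behind the rewrite above: storing SRC into
   LEN bits of DEST at bit POS is an AND that clears the field
   followed by an IOR that merges the shifted value; the AND drops out
   when SRC is all ones.  Assumes LEN < 32 here. */

static unsigned int
store_bitfield_sketch (dest, src, pos, len)
     unsigned int dest, src;
     int pos, len;
{
  unsigned int mask = (1U << len) - 1;

  return (dest & ~(mask << pos)) | ((src & mask) << pos);
}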
2767
2768 /* Otherwise, see if this is an operation that we can split into two.
2769 If so, try to split that. */
2770 code = GET_CODE (SET_SRC (x));
2771
2772 switch (code)
2773 {
2774 case AND:
2775 /* If we are AND'ing with a large constant that is only a single
2776 bit and the result is only being used in a context where we
2777 need to know if it is zero or non-zero, replace it with a bit
2778 extraction. This will avoid the large constant, which might
2779 have taken more than one insn to make. If the constant were
2780 not a valid argument to the AND but took only one insn to make,
2781 this is no worse, but if it took more than one insn, it will
2782 be better. */
2783
2784 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2785 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2786 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2787 && GET_CODE (SET_DEST (x)) == REG
2788 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2789 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2790 && XEXP (*split, 0) == SET_DEST (x)
2791 && XEXP (*split, 1) == const0_rtx)
2792 {
2793 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2794 XEXP (SET_SRC (x), 0),
2795 pos, NULL_RTX, 1, 1, 0, 0);
2796 if (extraction != 0)
2797 {
2798 SUBST (SET_SRC (x), extraction);
2799 return find_split_point (loc, insn);
2800 }
2801 }
2802 break;
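/* Sketch: both forms below test bit 12, but the second avoids
   materializing the wide constant 0x1000; it is what the bit
   extraction substituted above amounts to. */

static int
bit_12_set_p (x)
     unsigned int x;
{
  /* (x & 0x1000) != 0  -- the AND form, needs the wide constant.  */
  return (x >> 12) & 1;	/* the extraction form */
}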
2803
2804 case NE:
2805 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
2806 is known to be on, this can be converted into a NEG of a shift. */
2807 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2808 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
2809 && 1 <= (pos = exact_log2
2810 (nonzero_bits (XEXP (SET_SRC (x), 0),
2811 GET_MODE (XEXP (SET_SRC (x), 0))))))
2812 {
2813 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2814
2600 SUBST (SET_SRC (x),
2815 SUBST (SET_SRC (x),
2601 make_extraction (GET_MODE (SET_DEST (x)),
2602 XEXP (SET_SRC (x), 0),
2603 pos, NULL_RTX, 1, 1, 0, 0));
2604 return find_split_point (loc, insn);
2816 gen_rtx_combine (NEG, mode,
2817 gen_rtx_combine (LSHIFTRT, mode,
2818 XEXP (SET_SRC (x), 0),
2819 GEN_INT (pos))));
2820
2821 split = find_split_point (&SET_SRC (x), insn);
2822 if (split && split != &SET_SRC (x))
2823 return split;
2605 }
2606 break;
2607
2608 case SIGN_EXTEND:
2609 inner = XEXP (SET_SRC (x), 0);
2824 }
2825 break;
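/* Sketch: when bit POS is the only bit of X that can be nonzero, the
   shift leaves 0 or 1 and the negation turns 1 into -1, which is
   exactly (ne X 0) under a STORE_FLAG_VALUE of -1. */

static int
ne_as_neg_shift (x, pos)
     unsigned int x;
     int pos;
{
  /* assumes bits of X other than POS are known to be zero */
  return -(int) (x >> pos);
}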
2826
2827 case SIGN_EXTEND:
2828 inner = XEXP (SET_SRC (x), 0);
2829
2830 /* We can't optimize if either mode is a partial integer
2831 mode as we don't know how many bits are significant
2832 in those modes. */
2833 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2834 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2835 break;
2836
2610 pos = 0;
2611 len = GET_MODE_BITSIZE (GET_MODE (inner));
2612 unsignedp = 0;
2613 break;
2614
2615 case SIGN_EXTRACT:
2616 case ZERO_EXTRACT:
2617 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2618 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2619 {
2620 inner = XEXP (SET_SRC (x), 0);
2621 len = INTVAL (XEXP (SET_SRC (x), 1));
2622 pos = INTVAL (XEXP (SET_SRC (x), 2));
2623
2624 if (BITS_BIG_ENDIAN)
2625 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2626 unsignedp = (code == ZERO_EXTRACT);
2627 }
2628 break;
2837 pos = 0;
2838 len = GET_MODE_BITSIZE (GET_MODE (inner));
2839 unsignedp = 0;
2840 break;
2841
2842 case SIGN_EXTRACT:
2843 case ZERO_EXTRACT:
2844 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2845 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2846 {
2847 inner = XEXP (SET_SRC (x), 0);
2848 len = INTVAL (XEXP (SET_SRC (x), 1));
2849 pos = INTVAL (XEXP (SET_SRC (x), 2));
2850
2851 if (BITS_BIG_ENDIAN)
2852 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2853 unsignedp = (code == ZERO_EXTRACT);
2854 }
2855 break;
2856
2857 default:
2858 break;
2629 }
2630
2631 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2632 {
2633 enum machine_mode mode = GET_MODE (SET_SRC (x));
2634
2635 /* For unsigned, we have a choice of a shift followed by an
2636 AND or two shifts. Use two shifts for field sizes where the
2637 constant might be too large. We assume here that we can
2638 always at least get 8-bit constants in an AND insn, which is
2639 true for every current RISC. */
2640
2641 if (unsignedp && len <= 8)
2642 {
2643 SUBST (SET_SRC (x),
2644 gen_rtx_combine
2645 (AND, mode,
2646 gen_rtx_combine (LSHIFTRT, mode,
2647 gen_lowpart_for_combine (mode, inner),
2648 GEN_INT (pos)),
2649 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2650
2651 split = find_split_point (&SET_SRC (x), insn);
2652 if (split && split != &SET_SRC (x))
2653 return split;
2654 }
2655 else
2656 {
2657 SUBST (SET_SRC (x),
2658 gen_rtx_combine
2659 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2660 gen_rtx_combine (ASHIFT, mode,
2661 gen_lowpart_for_combine (mode, inner),
2662 GEN_INT (GET_MODE_BITSIZE (mode)
2663 - len - pos)),
2664 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2665
2666 split = find_split_point (&SET_SRC (x), insn);
2667 if (split && split != &SET_SRC (x))
2668 return split;
2669 }
2670 }
2671
2672 /* See if this is a simple operation with a constant as the second
2673 operand. It might be that this constant is out of range and hence
2674 could be used as a split point. */
2675 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2676 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2677 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2678 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2679 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2680 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2681 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2682 == 'o'))))
2683 return &XEXP (SET_SRC (x), 1);
2684
2685 /* Finally, see if this is a simple operation with its first operand
2686 not in a register. The operation might require this operand in a
2687 register, so return it as a split point. We can always do this
2688 because if the first operand were another operation, we would have
2689 already found it as a split point. */
2690 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2691 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2692 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2693 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2694 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2695 return &XEXP (SET_SRC (x), 0);
2696
2697 return 0;
2698
2699 case AND:
2700 case IOR:
2701 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2702 it is better to write this as (not (ior A B)) so we can split it.
2703 Similarly for IOR. */
2704 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2705 {
2706 SUBST (*loc,
2707 gen_rtx_combine (NOT, GET_MODE (x),
2708 gen_rtx_combine (code == IOR ? AND : IOR,
2709 GET_MODE (x),
2710 XEXP (XEXP (x, 0), 0),
2711 XEXP (XEXP (x, 1), 0))));
2712 return find_split_point (loc, insn);
2713 }
2714
2715 /* Many RISC machines have a large set of logical insns. If the
2716 second operand is a NOT, put it first so we will try to split the
2717 other operand first. */
2718 if (GET_CODE (XEXP (x, 1)) == NOT)
2719 {
2720 rtx tem = XEXP (x, 0);
2721 SUBST (XEXP (x, 0), XEXP (x, 1));
2722 SUBST (XEXP (x, 1), tem);
2723 }
2724 break;
2859 }
2860
2861 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2862 {
2863 enum machine_mode mode = GET_MODE (SET_SRC (x));
2864
2865 /* For unsigned, we have a choice of a shift followed by an
2866 AND or two shifts. Use two shifts for field sizes where the
2867 constant might be too large. We assume here that we can
2868 always at least get 8-bit constants in an AND insn, which is
2869 true for every current RISC. */
2870
2871 if (unsignedp && len <= 8)
2872 {
2873 SUBST (SET_SRC (x),
2874 gen_rtx_combine
2875 (AND, mode,
2876 gen_rtx_combine (LSHIFTRT, mode,
2877 gen_lowpart_for_combine (mode, inner),
2878 GEN_INT (pos)),
2879 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2880
2881 split = find_split_point (&SET_SRC (x), insn);
2882 if (split && split != &SET_SRC (x))
2883 return split;
2884 }
2885 else
2886 {
2887 SUBST (SET_SRC (x),
2888 gen_rtx_combine
2889 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2890 gen_rtx_combine (ASHIFT, mode,
2891 gen_lowpart_for_combine (mode, inner),
2892 GEN_INT (GET_MODE_BITSIZE (mode)
2893 - len - pos)),
2894 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2895
2896 split = find_split_point (&SET_SRC (x), insn);
2897 if (split && split != &SET_SRC (x))
2898 return split;
2899 }
2900 }
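/* Sketch of the two equivalent unsigned extractions chosen between
   above, for a LEN-bit field at bit POS of a 32-bit word (LEN < 32);
   the signed case uses an arithmetic right shift instead. */

static unsigned int
extract_with_and (x, pos, len)
     unsigned int x;
     int pos, len;
{
  return (x >> pos) & ((1U << len) - 1);
}

static unsigned int
extract_with_shifts (x, pos, len)
     unsigned int x;
     int pos, len;
{
  return (x << (32 - len - pos)) >> (32 - len);
}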
2901
2902 /* See if this is a simple operation with a constant as the second
2903 operand. It might be that this constant is out of range and hence
2904 could be used as a split point. */
2905 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2906 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2907 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2908 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2909 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2910 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2911 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2912 == 'o'))))
2913 return &XEXP (SET_SRC (x), 1);
2914
2915 /* Finally, see if this is a simple operation with its first operand
2916 not in a register. The operation might require this operand in a
2917 register, so return it as a split point. We can always do this
2918 because if the first operand were another operation, we would have
2919 already found it as a split point. */
2920 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2921 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2922 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2923 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2924 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2925 return &XEXP (SET_SRC (x), 0);
2926
2927 return 0;
2928
2929 case AND:
2930 case IOR:
2931 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2932 it is better to write this as (not (ior A B)) so we can split it.
2933 Similarly for IOR. */
2934 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2935 {
2936 SUBST (*loc,
2937 gen_rtx_combine (NOT, GET_MODE (x),
2938 gen_rtx_combine (code == IOR ? AND : IOR,
2939 GET_MODE (x),
2940 XEXP (XEXP (x, 0), 0),
2941 XEXP (XEXP (x, 1), 0))));
2942 return find_split_point (loc, insn);
2943 }
2944
2945 /* Many RISC machines have a large set of logical insns. If the
2946 second operand is a NOT, put it first so we will try to split the
2947 other operand first. */
2948 if (GET_CODE (XEXP (x, 1)) == NOT)
2949 {
2950 rtx tem = XEXP (x, 0);
2951 SUBST (XEXP (x, 0), XEXP (x, 1));
2952 SUBST (XEXP (x, 1), tem);
2953 }
2954 break;
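/* Sketch: the rewrite above is just De Morgan's law; both functions
   compute NOR, but the second form has the NOT at the root where a
   split point can be made. */

static unsigned int
nor_as_two_nots (a, b)
     unsigned int a, b;
{
  return ~a & ~b;
}

static unsigned int
nor_as_not_ior (a, b)
     unsigned int a, b;
{
  return ~(a | b);
}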
2955
2956 default:
2957 break;
2725 }
2726
2727 /* Otherwise, select our actions depending on our rtx class. */
2728 switch (GET_RTX_CLASS (code))
2729 {
2730 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2731 case '3':
2732 split = find_split_point (&XEXP (x, 2), insn);
2733 if (split)
2734 return split;
2958 }
2959
2960 /* Otherwise, select our actions depending on our rtx class. */
2961 switch (GET_RTX_CLASS (code))
2962 {
2963 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2964 case '3':
2965 split = find_split_point (&XEXP (x, 2), insn);
2966 if (split)
2967 return split;
2735 /* ... fall through ... */
2968 /* ... fall through ... */
2736 case '2':
2737 case 'c':
2738 case '<':
2739 split = find_split_point (&XEXP (x, 1), insn);
2740 if (split)
2741 return split;
2969 case '2':
2970 case 'c':
2971 case '<':
2972 split = find_split_point (&XEXP (x, 1), insn);
2973 if (split)
2974 return split;
2742 /* ... fall through ... */
2975 /* ... fall through ... */
2743 case '1':
2744 /* Some machines have (and (shift ...) ...) insns. If X is not
2745 an AND, but XEXP (X, 0) is, use it as our split point. */
2746 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2747 return &XEXP (x, 0);
2748
2749 split = find_split_point (&XEXP (x, 0), insn);
2750 if (split)
2751 return split;
2752 return loc;
2753 }
2754
2755 /* Otherwise, we don't have a split point. */
2756 return 0;
2757}
2758
2759/* Throughout X, replace FROM with TO, and return the result.
2760 The result is TO if X is FROM;
2761 otherwise the result is X, but its contents may have been modified.
2762 If they were modified, a record was made in undobuf so that
2763 undo_all will (among other things) return X to its original state.
2764
2765 If the number of changes necessary is too much to record to undo,
2766 the excess changes are not made, so the result is invalid.
2767 The changes already made can still be undone.
2768 undobuf.num_undo is incremented for such changes, so by testing that
2769 the caller can tell whether the result is valid.
2770
2771 `n_occurrences' is incremented each time FROM is replaced.
2772
2773 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2774
2775 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2776 by copying if `n_occurrences' is non-zero. */
2777
2778static rtx
2779subst (x, from, to, in_dest, unique_copy)
2780 register rtx x, from, to;
2781 int in_dest;
2782 int unique_copy;
2783{
2784 register enum rtx_code code = GET_CODE (x);
2785 enum machine_mode op0_mode = VOIDmode;
2786 register char *fmt;
2787 register int len, i;
2788 rtx new;
2789
2790/* Two expressions are equal if they are identical copies of a shared
2791 RTX or if they are both registers with the same register number
2792 and mode. */
2793
2794#define COMBINE_RTX_EQUAL_P(X,Y) \
2795 ((X) == (Y) \
2796 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2797 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2798
2799 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2800 {
2801 n_occurrences++;
2802 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2803 }
2804
2805 /* If X and FROM are the same register but different modes, they will
2806 not have been seen as equal above. However, flow.c will make a
2807 LOG_LINKS entry for that case. If we do nothing, we will try to
2808 rerecognize our original insn and, when it succeeds, we will
2809 delete the feeding insn, which is incorrect.
2810
2811 So force this insn not to match in this (rare) case. */
2812 if (! in_dest && code == REG && GET_CODE (from) == REG
2813 && REGNO (x) == REGNO (from))
2976 case '1':
2977 /* Some machines have (and (shift ...) ...) insns. If X is not
2978 an AND, but XEXP (X, 0) is, use it as our split point. */
2979 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2980 return &XEXP (x, 0);
2981
2982 split = find_split_point (&XEXP (x, 0), insn);
2983 if (split)
2984 return split;
2985 return loc;
2986 }
2987
2988 /* Otherwise, we don't have a split point. */
2989 return 0;
2990}
2991
2992/* Throughout X, replace FROM with TO, and return the result.
2993 The result is TO if X is FROM;
2994 otherwise the result is X, but its contents may have been modified.
2995 If they were modified, a record was made in undobuf so that
2996 undo_all will (among other things) return X to its original state.
2997
2998 If the number of changes necessary is too much to record to undo,
2999 the excess changes are not made, so the result is invalid.
3000 The changes already made can still be undone.
3001 an entry is added to undobuf.undos for such changes, so by testing that
3002 the caller can tell whether the result is valid.
3003
3004 `n_occurrences' is incremented each time FROM is replaced.
3005
3006 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3007
3008 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3009 by copying if `n_occurrences' is non-zero. */
3010
3011static rtx
3012subst (x, from, to, in_dest, unique_copy)
3013 register rtx x, from, to;
3014 int in_dest;
3015 int unique_copy;
3016{
3017 register enum rtx_code code = GET_CODE (x);
3018 enum machine_mode op0_mode = VOIDmode;
3019 register char *fmt;
3020 register int len, i;
3021 rtx new;
3022
3023/* Two expressions are equal if they are identical copies of a shared
3024 RTX or if they are both registers with the same register number
3025 and mode. */
3026
3027#define COMBINE_RTX_EQUAL_P(X,Y) \
3028 ((X) == (Y) \
3029 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3030 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3031
3032 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3033 {
3034 n_occurrences++;
3035 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3036 }
3037
3038 /* If X and FROM are the same register but different modes, they will
3039 not have been seen as equal above. However, flow.c will make a
3040 LOG_LINKS entry for that case. If we do nothing, we will try to
3041 rerecognize our original insn and, when it succeeds, we will
3042 delete the feeding insn, which is incorrect.
3043
3044 So force this insn not to match in this (rare) case. */
3045 if (! in_dest && code == REG && GET_CODE (from) == REG
3046 && REGNO (x) == REGNO (from))
2814 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
3047 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
2815
2816 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2817 of which may contain things that can be combined. */
2818 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2819 return x;
2820
2821 /* It is possible to have a subexpression appear twice in the insn.
2822 Suppose that FROM is a register that appears within TO.
2823 Then, after that subexpression has been scanned once by `subst',
2824 the second time it is scanned, TO may be found. If we were
2825 to scan TO here, we would find FROM within it and create a
2826 self-referent rtl structure which is completely wrong. */
2827 if (COMBINE_RTX_EQUAL_P (x, to))
2828 return to;
2829
3048
3049 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3050 of which may contain things that can be combined. */
3051 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3052 return x;
3053
3054 /* It is possible to have a subexpression appear twice in the insn.
3055 Suppose that FROM is a register that appears within TO.
3056 Then, after that subexpression has been scanned once by `subst',
3057 the second time it is scanned, TO may be found. If we were
3058 to scan TO here, we would find FROM within it and create a
3059 self-referent rtl structure which is completely wrong. */
3060 if (COMBINE_RTX_EQUAL_P (x, to))
3061 return to;
3062
2830 len = GET_RTX_LENGTH (code);
2831 fmt = GET_RTX_FORMAT (code);
3063 /* Parallel asm_operands need special attention because all of the
3064 inputs are shared across the arms. Furthermore, unsharing the
3065 rtl results in recognition failures. Failure to handle this case
3066 specially can result in circular rtl.
2832
3067
2833 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2834 set up to skip this common case. All other cases where we want to
2835 suppress replacing something inside a SET_SRC are handled via the
2836 IN_DEST operand. */
2837 if (code == SET
2838 && (GET_CODE (SET_DEST (x)) == REG
2839 || GET_CODE (SET_DEST (x)) == CC0
2840 || GET_CODE (SET_DEST (x)) == PC))
2841 fmt = "ie";
3068 Solve this by doing a normal pass across the first entry of the
3069 parallel, and only processing the SET_DESTs of the subsequent
3070 entries. Ug. */
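/* Illustration: a two-output asm yields rtl shaped like
       (parallel [(set (reg A) (asm_operands ... [IN0 IN1] ...))
                  (set (reg B) (asm_operands ... [IN0 IN1] ...))])
   where both asm_operands share the very same input rtx's, so only
   the first arm may be substituted through in full.  */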
2842
3071
2843 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2844 if (fmt[0] == 'e')
2845 op0_mode = GET_MODE (XEXP (x, 0));
2846
2847 for (i = 0; i < len; i++)
3072 if (code == PARALLEL
3073 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3074 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
2848 {
3075 {
2849 if (fmt[i] == 'E')
3076 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3077
3078 /* If this substitution failed, this whole thing fails. */
3079 if (GET_CODE (new) == CLOBBER
3080 && XEXP (new, 0) == const0_rtx)
3081 return new;
3082
3083 SUBST (XVECEXP (x, 0, 0), new);
3084
3085 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
2850 {
3086 {
2851 register int j;
2852 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3087 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3088
3089 if (GET_CODE (dest) != REG
3090 && GET_CODE (dest) != CC0
3091 && GET_CODE (dest) != PC)
2853 {
3092 {
2854 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2855 {
2856 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2857 n_occurrences++;
2858 }
2859 else
2860 {
2861 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
3093 new = subst (dest, from, to, 0, unique_copy);
2862
3094
2863 /* If this substitution failed, this whole thing fails. */
2864 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2865 return new;
2866 }
3095 /* If this substitution failed, this whole thing fails. */
3096 if (GET_CODE (new) == CLOBBER
3097 && XEXP (new, 0) == const0_rtx)
3098 return new;
2867
3099
2868 SUBST (XVECEXP (x, i, j), new);
3100 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
2869 }
2870 }
3101 }
3102 }
2871 else if (fmt[i] == 'e')
3103 }
3104 else
3105 {
3106 len = GET_RTX_LENGTH (code);
3107 fmt = GET_RTX_FORMAT (code);
3108
3109 /* We don't need to process a SET_DEST that is a register, CC0,
3110 or PC, so set up to skip this common case. All other cases
3111 where we want to suppress replacing something inside a
3112 SET_SRC are handled via the IN_DEST operand. */
3113 if (code == SET
3114 && (GET_CODE (SET_DEST (x)) == REG
3115 || GET_CODE (SET_DEST (x)) == CC0
3116 || GET_CODE (SET_DEST (x)) == PC))
3117 fmt = "ie";
3118
3119 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3120 constant. */
3121 if (fmt[0] == 'e')
3122 op0_mode = GET_MODE (XEXP (x, 0));
3123
3124 for (i = 0; i < len; i++)
2872 {
3125 {
2873 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3126 if (fmt[i] == 'E')
2874 {
3127 {
2875 /* In general, don't install a subreg involving two modes not
2876 tieable. It can worsen register allocation, and can even
2877 make invalid reload insns, since the reg inside may need to
2878 be copied from in the outside mode, and that may be invalid
2879 if it is an fp reg copied in integer mode.
3128 register int j;
3129 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3130 {
3131 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3132 {
3133 new = (unique_copy && n_occurrences
3134 ? copy_rtx (to) : to);
3135 n_occurrences++;
3136 }
3137 else
3138 {
3139 new = subst (XVECEXP (x, i, j), from, to, 0,
3140 unique_copy);
2880
3141
2881 We allow two exceptions to this: It is valid if it is inside
2882 another SUBREG and the mode of that SUBREG and the mode of
2883 the inside of TO is tieable and it is valid if X is a SET
2884 that copies FROM to CC0. */
2885 if (GET_CODE (to) == SUBREG
2886 && ! MODES_TIEABLE_P (GET_MODE (to),
2887 GET_MODE (SUBREG_REG (to)))
2888 && ! (code == SUBREG
2889 && MODES_TIEABLE_P (GET_MODE (x),
2890 GET_MODE (SUBREG_REG (to))))
3142 /* If this substitution failed, this whole thing
3143 fails. */
3144 if (GET_CODE (new) == CLOBBER
3145 && XEXP (new, 0) == const0_rtx)
3146 return new;
3147 }
3148
3149 SUBST (XVECEXP (x, i, j), new);
3150 }
3151 }
3152 else if (fmt[i] == 'e')
3153 {
3154 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3155 {
3156 /* In general, don't install a subreg involving two
3157 modes not tieable. It can worsen register
3158 allocation, and can even make invalid reload
3159 insns, since the reg inside may need to be copied
3160 from in the outside mode, and that may be invalid
3161 if it is an fp reg copied in integer mode.
3162
3163 We allow two exceptions to this: It is valid if
3164 it is inside another SUBREG and the mode of that
3165 SUBREG and the mode of the inside of TO is
3166 tieable and it is valid if X is a SET that copies
3167 FROM to CC0. */
3168
3169 if (GET_CODE (to) == SUBREG
3170 && ! MODES_TIEABLE_P (GET_MODE (to),
3171 GET_MODE (SUBREG_REG (to)))
3172 && ! (code == SUBREG
3173 && MODES_TIEABLE_P (GET_MODE (x),
3174 GET_MODE (SUBREG_REG (to))))
2891#ifdef HAVE_cc0
3175#ifdef HAVE_cc0
2892 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3176 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2893#endif
3177#endif
2894 )
2895 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
3178 )
3179 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
2896
3180
2897 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2898 n_occurrences++;
2899 }
2900 else
2901 /* If we are in a SET_DEST, suppress most cases unless we
2902 have gone inside a MEM, in which case we want to
2903 simplify the address. We assume here that things that
2904 are actually part of the destination have their inner
2905 parts in the first expression. This is true for SUBREG,
2906 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2907 things aside from REG and MEM that should appear in a
2908 SET_DEST. */
2909 new = subst (XEXP (x, i), from, to,
2910 (((in_dest
2911 && (code == SUBREG || code == STRICT_LOW_PART
2912 || code == ZERO_EXTRACT))
2913 || code == SET)
2914 && i == 0), unique_copy);
3181 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3182 n_occurrences++;
3183 }
3184 else
3185 /* If we are in a SET_DEST, suppress most cases unless we
3186 have gone inside a MEM, in which case we want to
3187 simplify the address. We assume here that things that
3188 are actually part of the destination have their inner
3189 parts in the first expression. This is true for SUBREG,
3190 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3191 things aside from REG and MEM that should appear in a
3192 SET_DEST. */
3193 new = subst (XEXP (x, i), from, to,
3194 (((in_dest
3195 && (code == SUBREG || code == STRICT_LOW_PART
3196 || code == ZERO_EXTRACT))
3197 || code == SET)
3198 && i == 0), unique_copy);
2915
3199
2916 /* If we found that we will have to reject this combination,
2917 indicate that by returning the CLOBBER ourselves, rather than
2918 an expression containing it. This will speed things up as
2919 well as prevent accidents where two CLOBBERs are considered
2920 to be equal, thus producing an incorrect simplification. */
3200 /* If we found that we will have to reject this combination,
3201 indicate that by returning the CLOBBER ourselves, rather than
3202 an expression containing it. This will speed things up as
3203 well as prevent accidents where two CLOBBERs are considered
3204 to be equal, thus producing an incorrect simplification. */
2921
3205
2922 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2923 return new;
3206 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3207 return new;
2924
3208
2925 SUBST (XEXP (x, i), new);
3209 SUBST (XEXP (x, i), new);
3210 }
2926 }
2927 }
2928
2929 /* Try to simplify X. If the simplification changed the code, it is likely
2930 that further simplification will help, so loop, but limit the number
2931 of repetitions that will be performed. */
2932
2933 for (i = 0; i < 4; i++)
2934 {
2935 /* If X is sufficiently simple, don't bother trying to do anything
2936 with it. */
2937 if (code != CONST_INT && code != REG && code != CLOBBER)
2938 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2939
2940 if (GET_CODE (x) == code)
2941 break;
2942
2943 code = GET_CODE (x);
2944
2945 /* We no longer know the original mode of operand 0 since we
2946 have changed the form of X. */
2947 op0_mode = VOIDmode;
2948 }
2949
2950 return x;
2951}
2952
2953/* Simplify X, a piece of RTL. We just operate on the expression at the
2954 outer level; call `subst' to simplify recursively. Return the new
2955 expression.
2956
2957 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2958 will be the last iteration even if an expression with a code different from
2959 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
2960
2961static rtx
2962simplify_rtx (x, op0_mode, last, in_dest)
2963 rtx x;
2964 enum machine_mode op0_mode;
2965 int last;
2966 int in_dest;
2967{
2968 enum rtx_code code = GET_CODE (x);
2969 enum machine_mode mode = GET_MODE (x);
2970 rtx temp;
2971 int i;
2972
2973 /* If this is a commutative operation, put a constant last and a complex
2974 expression first. We don't need to do this for comparisons here. */
2975 if (GET_RTX_CLASS (code) == 'c'
2976 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2977 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2978 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2979 || (GET_CODE (XEXP (x, 0)) == SUBREG
2980 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2981 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2982 {
2983 temp = XEXP (x, 0);
2984 SUBST (XEXP (x, 0), XEXP (x, 1));
2985 SUBST (XEXP (x, 1), temp);
2986 }
2987
2988 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2989 sign extension of a PLUS with a constant, reverse the order of the sign
2990	 extension and the addition.  Note that this is not the same as the original
2991 code, but overflow is undefined for signed values. Also note that the
2992 PLUS will have been partially moved "inside" the sign-extension, so that
2993 the first operand of X will really look like:
2994 (ashiftrt (plus (ashift A C4) C5) C4).
2995 We convert this to
2996	 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2997 and replace the first operand of X with that expression. Later parts
2998 of this function may simplify the expression further.
2999
3000 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3001 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3002	 distributive law to produce (plus (mult (sign_extend A) C2) C3).
3003
3004 We do this to simplify address expressions. */
3005
3006 if ((code == PLUS || code == MINUS || code == MULT)
3007 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3008 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3009 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3010 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3011 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3012 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3013 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3014 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3015 XEXP (XEXP (XEXP (x, 0), 0), 1),
3016 XEXP (XEXP (x, 0), 1))) != 0)
3017 {
3018 rtx new
3019 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3020 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3021 INTVAL (XEXP (XEXP (x, 0), 1)));
3022
3023 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3024 INTVAL (XEXP (XEXP (x, 0), 1)));
3025
3026 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3027 }
3028
3029 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3030 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3031 things. Check for cases where both arms are testing the same
3032 condition.
3033
3034 Don't do anything if all operands are very simple. */
3035
3036 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3037 || GET_RTX_CLASS (code) == '<')
3038 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3039 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3040 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3041 == 'o')))
3042 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3043 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3044 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3045 == 'o')))))
3046 || (GET_RTX_CLASS (code) == '1'
3047 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3048 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3049 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3050 == 'o'))))))
3051 {
3052 rtx cond, true, false;
3053
3054 cond = if_then_else_cond (x, &true, &false);
3211 }
3212 }
3213
3214 /* Try to simplify X. If the simplification changed the code, it is likely
3215 that further simplification will help, so loop, but limit the number
3216 of repetitions that will be performed. */
3217
3218 for (i = 0; i < 4; i++)
3219 {
3220 /* If X is sufficiently simple, don't bother trying to do anything
3221 with it. */
3222 if (code != CONST_INT && code != REG && code != CLOBBER)
3223 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
3224
3225 if (GET_CODE (x) == code)
3226 break;
3227
3228 code = GET_CODE (x);
3229
3230 /* We no longer know the original mode of operand 0 since we
3231	 have changed the form of X.  */
3232 op0_mode = VOIDmode;
3233 }
3234
3235 return x;
3236}
3237
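/* Editor's note -- illustrative sketch, not part of combine.c.  The
   retry loop above reruns simplification at most four times, starting
   over only while the top-level rtx code keeps changing, so the search
   for a fixed point stays bounded.  A minimal C sketch of the same
   pattern; `expr', `expr_code' and `simplify_once' are hypothetical
   stand-ins for rtx, GET_CODE and simplify_rtx.  */
struct expr;
extern int expr_code (struct expr *);
extern struct expr *simplify_once (struct expr *, int);

static struct expr *
simplify_bounded (struct expr *x)
{
  int i;
  int code = expr_code (x);

  for (i = 0; i < 4; i++)
    {
      /* Tell the last pass (i == 3) that no retries will follow.  */
      x = simplify_once (x, i == 3);
      if (expr_code (x) == code)
        break;                  /* Code unchanged: fixed point reached.  */
      code = expr_code (x);     /* Code changed: it may simplify again.  */
    }
  return x;
}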
3238/* Simplify X, a piece of RTL. We just operate on the expression at the
3239 outer level; call `subst' to simplify recursively. Return the new
3240 expression.
3241
3242 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3243	 will be the last iteration even if an expression with a code different from
3244 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
3245
3246static rtx
3247simplify_rtx (x, op0_mode, last, in_dest)
3248 rtx x;
3249 enum machine_mode op0_mode;
3250 int last;
3251 int in_dest;
3252{
3253 enum rtx_code code = GET_CODE (x);
3254 enum machine_mode mode = GET_MODE (x);
3255 rtx temp;
3256 int i;
3257
3258 /* If this is a commutative operation, put a constant last and a complex
3259 expression first. We don't need to do this for comparisons here. */
3260 if (GET_RTX_CLASS (code) == 'c'
3261 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3262 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3263 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3264 || (GET_CODE (XEXP (x, 0)) == SUBREG
3265 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3266 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3267 {
3268 temp = XEXP (x, 0);
3269 SUBST (XEXP (x, 0), XEXP (x, 1));
3270 SUBST (XEXP (x, 1), temp);
3271 }
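/* Editor's note -- illustrative sketch, not part of combine.c.  The
   swap above canonicalizes commutative operations so the constant or
   otherwise "simple" operand comes second; later pattern matching then
   needs to try only one operand order.  A toy version, with the
   hypothetical `struct node' and `is_complex' standing in for rtx and
   the GET_RTX_CLASS tests.  */
struct node { struct node *op0, *op1; };
extern int is_complex (struct node *);

static void
canonicalize_commutative (struct node *n)
{
  if (is_complex (n->op0) || ! is_complex (n->op1))
    return;                     /* Already canonical.  */
  {
    struct node *tem = n->op0;  /* Swap, as the SUBST pair above does,  */
    n->op0 = n->op1;            /* so the complex operand is first and  */
    n->op1 = tem;               /* the simple one is second.  */
  }
}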
3272
3273 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3274 sign extension of a PLUS with a constant, reverse the order of the sign
3275	 extension and the addition.  Note that this is not the same as the original
3276 code, but overflow is undefined for signed values. Also note that the
3277 PLUS will have been partially moved "inside" the sign-extension, so that
3278 the first operand of X will really look like:
3279 (ashiftrt (plus (ashift A C4) C5) C4).
3280 We convert this to
3281	 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3282 and replace the first operand of X with that expression. Later parts
3283 of this function may simplify the expression further.
3284
3285 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3286 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3287	 distributive law to produce (plus (mult (sign_extend A) C2) C3).
3288
3289 We do this to simplify address expressions. */
3290
3291 if ((code == PLUS || code == MINUS || code == MULT)
3292 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3293 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3294 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3295 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3296 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3297 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3298 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3299 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3300 XEXP (XEXP (XEXP (x, 0), 0), 1),
3301 XEXP (XEXP (x, 0), 1))) != 0)
3302 {
3303 rtx new
3304 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3305 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3306 INTVAL (XEXP (XEXP (x, 0), 1)));
3307
3308 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3309 INTVAL (XEXP (XEXP (x, 0), 1)));
3310
3311 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3312 }
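/* Editor's note -- illustrative sketch, not part of combine.c.  The
   rewrite above relies on sign extension being a shift pair: for an
   8-bit field in a 32-bit int, sign_extend is ((x << 24) >> 24) with an
   arithmetic right shift.  The sketch below shows the identity being
   exploited; it assumes two's complement, an arithmetic >> on int, and
   -- as the comment notes -- that signed overflow may be disregarded,
   i.e. a and a + c both fit in the 8-bit field.  */
static int
reorder_sext_plus (int a, int c)
{
  /* sign_extend (plus a c), as a shift pair.  */
  int before = (int) ((unsigned) (a + c) << 24) >> 24;
  /* plus (sign_extend a) c, after the reordering above.  */
  int after = ((int) ((unsigned) a << 24) >> 24) + c;

  return before == after;       /* Holds under the assumptions above.  */
}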
3313
3314 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3315 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3316 things. Check for cases where both arms are testing the same
3317 condition.
3318
3319 Don't do anything if all operands are very simple. */
3320
3321 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3322 || GET_RTX_CLASS (code) == '<')
3323 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3324 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3325 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3326 == 'o')))
3327 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3328 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3329 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3330 == 'o')))))
3331 || (GET_RTX_CLASS (code) == '1'
3332 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3333 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3334 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3335 == 'o'))))))
3336 {
3337 rtx cond, true, false;
3338
3339 cond = if_then_else_cond (x, &true, &false);
3055 if (cond != 0)
3340 if (cond != 0
3341 /* If everything is a comparison, what we have is highly unlikely
3342 to be simpler, so don't use it. */
3343 && ! (GET_RTX_CLASS (code) == '<'
3344 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3345 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3056 {
3057 rtx cop1 = const0_rtx;
3058 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3059
3060 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3061 return x;
3062
3063 /* Simplify the alternative arms; this may collapse the true and
3064 false arms to store-flag values. */
3065 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3066 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3067
3068 /* Restarting if we generate a store-flag expression will cause
3069 us to loop. Just drop through in this case. */
3070
3071 /* If the result values are STORE_FLAG_VALUE and zero, we can
3072 just make the comparison operation. */
3073 if (true == const_true_rtx && false == const0_rtx)
3074 x = gen_binary (cond_code, mode, cond, cop1);
3075 else if (true == const0_rtx && false == const_true_rtx)
3076 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3077
3078 /* Likewise, we can make the negate of a comparison operation
3079 if the result values are - STORE_FLAG_VALUE and zero. */
3080 else if (GET_CODE (true) == CONST_INT
3081 && INTVAL (true) == - STORE_FLAG_VALUE
3082 && false == const0_rtx)
3083 x = gen_unary (NEG, mode, mode,
3084 gen_binary (cond_code, mode, cond, cop1));
3085 else if (GET_CODE (false) == CONST_INT
3086 && INTVAL (false) == - STORE_FLAG_VALUE
3087 && true == const0_rtx)
3088 x = gen_unary (NEG, mode, mode,
3089 gen_binary (reverse_condition (cond_code),
3090 mode, cond, cop1));
3091 else
3346 {
3347 rtx cop1 = const0_rtx;
3348 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3349
3350 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3351 return x;
3352
3353 /* Simplify the alternative arms; this may collapse the true and
3354 false arms to store-flag values. */
3355 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3356 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3357
3358 /* Restarting if we generate a store-flag expression will cause
3359 us to loop. Just drop through in this case. */
3360
3361 /* If the result values are STORE_FLAG_VALUE and zero, we can
3362 just make the comparison operation. */
3363 if (true == const_true_rtx && false == const0_rtx)
3364 x = gen_binary (cond_code, mode, cond, cop1);
3365 else if (true == const0_rtx && false == const_true_rtx)
3366 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3367
3368 /* Likewise, we can make the negate of a comparison operation
3369 if the result values are - STORE_FLAG_VALUE and zero. */
3370 else if (GET_CODE (true) == CONST_INT
3371 && INTVAL (true) == - STORE_FLAG_VALUE
3372 && false == const0_rtx)
3373 x = gen_unary (NEG, mode, mode,
3374 gen_binary (cond_code, mode, cond, cop1));
3375 else if (GET_CODE (false) == CONST_INT
3376 && INTVAL (false) == - STORE_FLAG_VALUE
3377 && true == const0_rtx)
3378 x = gen_unary (NEG, mode, mode,
3379 gen_binary (reverse_condition (cond_code),
3380 mode, cond, cop1));
3381 else
3092 return gen_rtx (IF_THEN_ELSE, mode,
3093 gen_binary (cond_code, VOIDmode, cond, cop1),
3094 true, false);
3382 return gen_rtx_IF_THEN_ELSE (mode,
3383 gen_binary (cond_code, VOIDmode,
3384 cond, cop1),
3385 true, false);
3095
3096 code = GET_CODE (x);
3097 op0_mode = VOIDmode;
3098 }
3099 }
3100
3101 /* Try to fold this expression in case we have constants that weren't
3102 present before. */
3103 temp = 0;
3104 switch (GET_RTX_CLASS (code))
3105 {
3106 case '1':
3107 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3108 break;
3109 case '<':
3110 temp = simplify_relational_operation (code, op0_mode,
3111 XEXP (x, 0), XEXP (x, 1));
3112#ifdef FLOAT_STORE_FLAG_VALUE
3113 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3114 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3115 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3116#endif
3117 break;
3118 case 'c':
3119 case '2':
3120 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3121 break;
3122 case 'b':
3123 case '3':
3124 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3125 XEXP (x, 1), XEXP (x, 2));
3126 break;
3127 }
3128
3129 if (temp)
3130 x = temp, code = GET_CODE (temp);
3131
3132 /* First see if we can apply the inverse distributive law. */
3133 if (code == PLUS || code == MINUS
3134 || code == AND || code == IOR || code == XOR)
3135 {
3136 x = apply_distributive_law (x);
3137 code = GET_CODE (x);
3138 }
3139
3140 /* If CODE is an associative operation not otherwise handled, see if we
3141 can associate some operands. This can win if they are constants or
3142	 if they are logically related (i.e. (a & b) & a).  */
3143 if ((code == PLUS || code == MINUS
3144 || code == MULT || code == AND || code == IOR || code == XOR
3145 || code == DIV || code == UDIV
3146 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3147 && INTEGRAL_MODE_P (mode))
3148 {
3149 if (GET_CODE (XEXP (x, 0)) == code)
3150 {
3151 rtx other = XEXP (XEXP (x, 0), 0);
3152 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3153 rtx inner_op1 = XEXP (x, 1);
3154 rtx inner;
3155
3156 /* Make sure we pass the constant operand if any as the second
3157 one if this is a commutative operation. */
3158 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3159 {
3160 rtx tem = inner_op0;
3161 inner_op0 = inner_op1;
3162 inner_op1 = tem;
3163 }
3164 inner = simplify_binary_operation (code == MINUS ? PLUS
3165 : code == DIV ? MULT
3166 : code == UDIV ? MULT
3167 : code,
3168 mode, inner_op0, inner_op1);
3169
3170 /* For commutative operations, try the other pair if that one
3171 didn't simplify. */
3172 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3173 {
3174 other = XEXP (XEXP (x, 0), 1);
3175 inner = simplify_binary_operation (code, mode,
3176 XEXP (XEXP (x, 0), 0),
3177 XEXP (x, 1));
3178 }
3179
3180 if (inner)
3181 return gen_binary (code, mode, other, inner);
3182 }
3183 }
3184
3185 /* A little bit of algebraic simplification here. */
3186 switch (code)
3187 {
3188 case MEM:
3189 /* Ensure that our address has any ASHIFTs converted to MULT in case
3190 address-recognizing predicates are called later. */
3191 temp = make_compound_operation (XEXP (x, 0), MEM);
3192 SUBST (XEXP (x, 0), temp);
3193 break;
3194
3195 case SUBREG:
3196 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3197 is paradoxical. If we can't do that safely, then it becomes
3198 something nonsensical so that this combination won't take place. */
3199
3200 if (GET_CODE (SUBREG_REG (x)) == MEM
3201 && (GET_MODE_SIZE (mode)
3202 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3203 {
3204 rtx inner = SUBREG_REG (x);
3205 int endian_offset = 0;
3206 /* Don't change the mode of the MEM
3207 if that would change the meaning of the address. */
3208 if (MEM_VOLATILE_P (SUBREG_REG (x))
3209 || mode_dependent_address_p (XEXP (inner, 0)))
3386
3387 code = GET_CODE (x);
3388 op0_mode = VOIDmode;
3389 }
3390 }
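/* Editor's note -- illustrative sketch, not part of combine.c.  The
   collapsing of IF_THEN_ELSE arms above has a direct source-level
   analogue when STORE_FLAG_VALUE is 1: a conditional whose arms are 1
   and 0 is just the comparison itself, and swapped arms use the
   reversed condition.  */
static int
collapse_store_flag (int a, int b)
{
  int t1 = (a < b) ? 1 : 0;     /* (if_then_else (lt a b) 1 0)      */
  int t2 = (a < b);             /* ...collapses to (lt a b)         */
  int t3 = (a < b) ? 0 : 1;     /* arms swapped...                  */
  int t4 = (a >= b);            /* ...becomes the reverse condition */

  return t1 == t2 && t3 == t4;
}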
3391
3392 /* Try to fold this expression in case we have constants that weren't
3393 present before. */
3394 temp = 0;
3395 switch (GET_RTX_CLASS (code))
3396 {
3397 case '1':
3398 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3399 break;
3400 case '<':
3401 temp = simplify_relational_operation (code, op0_mode,
3402 XEXP (x, 0), XEXP (x, 1));
3403#ifdef FLOAT_STORE_FLAG_VALUE
3404 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3405 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3406 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3407#endif
3408 break;
3409 case 'c':
3410 case '2':
3411 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3412 break;
3413 case 'b':
3414 case '3':
3415 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3416 XEXP (x, 1), XEXP (x, 2));
3417 break;
3418 }
3419
3420 if (temp)
3421 x = temp, code = GET_CODE (temp);
3422
3423 /* First see if we can apply the inverse distributive law. */
3424 if (code == PLUS || code == MINUS
3425 || code == AND || code == IOR || code == XOR)
3426 {
3427 x = apply_distributive_law (x);
3428 code = GET_CODE (x);
3429 }
3430
3431 /* If CODE is an associative operation not otherwise handled, see if we
3432 can associate some operands. This can win if they are constants or
3433	 if they are logically related (i.e. (a & b) & a).  */
3434 if ((code == PLUS || code == MINUS
3435 || code == MULT || code == AND || code == IOR || code == XOR
3436 || code == DIV || code == UDIV
3437 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3438 && INTEGRAL_MODE_P (mode))
3439 {
3440 if (GET_CODE (XEXP (x, 0)) == code)
3441 {
3442 rtx other = XEXP (XEXP (x, 0), 0);
3443 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3444 rtx inner_op1 = XEXP (x, 1);
3445 rtx inner;
3446
3447 /* Make sure we pass the constant operand if any as the second
3448 one if this is a commutative operation. */
3449 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3450 {
3451 rtx tem = inner_op0;
3452 inner_op0 = inner_op1;
3453 inner_op1 = tem;
3454 }
3455 inner = simplify_binary_operation (code == MINUS ? PLUS
3456 : code == DIV ? MULT
3457 : code == UDIV ? MULT
3458 : code,
3459 mode, inner_op0, inner_op1);
3460
3461 /* For commutative operations, try the other pair if that one
3462 didn't simplify. */
3463 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3464 {
3465 other = XEXP (XEXP (x, 0), 1);
3466 inner = simplify_binary_operation (code, mode,
3467 XEXP (XEXP (x, 0), 0),
3468 XEXP (x, 1));
3469 }
3470
3471 if (inner)
3472 return gen_binary (code, mode, other, inner);
3473 }
3474 }
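/* Editor's note -- illustrative sketch, not part of combine.c.  The
   association above wins when the regrouped inner pair folds: either
   two constants combine, or the operands are logically related.  */
static int
reassociation_wins (int a, int b)
{
  int sum = (a + 3) + 4;        /* regroups to a + (3 + 4)...       */
  int sum_folded = a + 7;       /* ...and the constants fold        */
  int msk = (a & b) & a;        /* logically related operands...    */
  int msk_folded = a & b;       /* ...so the extra AND drops out    */

  return sum == sum_folded && msk == msk_folded;
}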
3475
3476 /* A little bit of algebraic simplification here. */
3477 switch (code)
3478 {
3479 case MEM:
3480 /* Ensure that our address has any ASHIFTs converted to MULT in case
3481 address-recognizing predicates are called later. */
3482 temp = make_compound_operation (XEXP (x, 0), MEM);
3483 SUBST (XEXP (x, 0), temp);
3484 break;
3485
3486 case SUBREG:
3487 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3488 is paradoxical. If we can't do that safely, then it becomes
3489 something nonsensical so that this combination won't take place. */
3490
3491 if (GET_CODE (SUBREG_REG (x)) == MEM
3492 && (GET_MODE_SIZE (mode)
3493 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3494 {
3495 rtx inner = SUBREG_REG (x);
3496 int endian_offset = 0;
3497 /* Don't change the mode of the MEM
3498 if that would change the meaning of the address. */
3499 if (MEM_VOLATILE_P (SUBREG_REG (x))
3500 || mode_dependent_address_p (XEXP (inner, 0)))
3210 return gen_rtx (CLOBBER, mode, const0_rtx);
3501 return gen_rtx_CLOBBER (mode, const0_rtx);
3211
3212 if (BYTES_BIG_ENDIAN)
3213 {
3214 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3215 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3216 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3217 endian_offset -= (UNITS_PER_WORD
3218 - GET_MODE_SIZE (GET_MODE (inner)));
3219 }
3220 /* Note if the plus_constant doesn't make a valid address
3221 then this combination won't be accepted. */
3502
3503 if (BYTES_BIG_ENDIAN)
3504 {
3505 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3506 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3507 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3508 endian_offset -= (UNITS_PER_WORD
3509 - GET_MODE_SIZE (GET_MODE (inner)));
3510 }
3511 /* Note if the plus_constant doesn't make a valid address
3512 then this combination won't be accepted. */
3222 x = gen_rtx (MEM, mode,
3223 plus_constant (XEXP (inner, 0),
3224 (SUBREG_WORD (x) * UNITS_PER_WORD
3225 + endian_offset)));
3513 x = gen_rtx_MEM (mode,
3514 plus_constant (XEXP (inner, 0),
3515 (SUBREG_WORD (x) * UNITS_PER_WORD
3516 + endian_offset)));
3226 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3227 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3228 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3229 return x;
3230 }
3231
3232 /* If we are in a SET_DEST, these other cases can't apply. */
3233 if (in_dest)
3234 return x;
3235
3236 /* Changing mode twice with SUBREG => just change it once,
3237 or not at all if changing back to starting mode. */
3238 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3239 {
3240 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3241 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3242 return SUBREG_REG (SUBREG_REG (x));
3243
3244 SUBST_INT (SUBREG_WORD (x),
3245 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3246 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3247 }
3248
3249 /* SUBREG of a hard register => just change the register number
3250 and/or mode. If the hard register is not valid in that mode,
3251 suppress this combination. If the hard register is the stack,
3252 frame, or argument pointer, leave this as a SUBREG. */
3253
3254 if (GET_CODE (SUBREG_REG (x)) == REG
3255 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3256 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3257#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3258 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3259#endif
3260#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3261 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3262#endif
3263 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3264 {
3265 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3266 mode))
3517 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3518 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3519 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3520 return x;
3521 }
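/* Editor's note -- illustrative sketch, not part of combine.c.  The
   MEM narrowing above turns (subreg:M (mem:N addr) W) into a MEM at a
   byte offset; on big-endian targets the low-order part of a word sits
   at its high address, hence the endian_offset correction.  A direct
   restatement of that computation with plain ints:  */
static int
subreg_byte_offset (int subreg_word, int mode_size, int inner_size,
                    int units_per_word, int bytes_big_endian)
{
  int endian_offset = 0;

  if (bytes_big_endian)
    {
      if (mode_size < units_per_word)
        endian_offset += units_per_word - mode_size;
      if (inner_size < units_per_word)
        endian_offset -= units_per_word - inner_size;
    }
  return subreg_word * units_per_word + endian_offset;
}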
3522
3523 /* If we are in a SET_DEST, these other cases can't apply. */
3524 if (in_dest)
3525 return x;
3526
3527 /* Changing mode twice with SUBREG => just change it once,
3528 or not at all if changing back to starting mode. */
3529 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3530 {
3531 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3532 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3533 return SUBREG_REG (SUBREG_REG (x));
3534
3535 SUBST_INT (SUBREG_WORD (x),
3536 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3537 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3538 }
3539
3540 /* SUBREG of a hard register => just change the register number
3541 and/or mode. If the hard register is not valid in that mode,
3542 suppress this combination. If the hard register is the stack,
3543 frame, or argument pointer, leave this as a SUBREG. */
3544
3545 if (GET_CODE (SUBREG_REG (x)) == REG
3546 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3547 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3548#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3549 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3550#endif
3551#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3552 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3553#endif
3554 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3555 {
3556 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3557 mode))
3267 return gen_rtx (REG, mode,
3268 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3558 return gen_rtx_REG (mode,
3559 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3269 else
3560 else
3270 return gen_rtx (CLOBBER, mode, const0_rtx);
3561 return gen_rtx_CLOBBER (mode, const0_rtx);
3271 }
3272
3273 /* For a constant, try to pick up the part we want. Handle a full
3274 word and low-order part. Only do this if we are narrowing
3275 the constant; if it is being widened, we have no idea what
3276 the extra bits will have been set to. */
3277
3278 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3279 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3562 }
3563
3564 /* For a constant, try to pick up the part we want. Handle a full
3565 word and low-order part. Only do this if we are narrowing
3566 the constant; if it is being widened, we have no idea what
3567 the extra bits will have been set to. */
3568
3569 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3570 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3280 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3571 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3281 && GET_MODE_CLASS (mode) == MODE_INT)
3282 {
3283 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3284 0, op0_mode);
3285 if (temp)
3286 return temp;
3287 }
3288
3289 /* If we want a subreg of a constant, at offset 0,
3290 take the low bits. On a little-endian machine, that's
3291 always valid. On a big-endian machine, it's valid
3572 && GET_MODE_CLASS (mode) == MODE_INT)
3573 {
3574 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3575 0, op0_mode);
3576 if (temp)
3577 return temp;
3578 }
3579
3580 /* If we want a subreg of a constant, at offset 0,
3581 take the low bits. On a little-endian machine, that's
3582 always valid. On a big-endian machine, it's valid
3292 only if the constant's mode fits in one word. */
3293 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3294 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3583 only if the constant's mode fits in one word. Note that we
3584 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3585 if (CONSTANT_P (SUBREG_REG (x))
3586 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3587 || ! WORDS_BIG_ENDIAN)
3588 ? SUBREG_WORD (x) == 0
3589 : (SUBREG_WORD (x)
3590 == ((GET_MODE_SIZE (op0_mode)
3591 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3592 / UNITS_PER_WORD)))
3593 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3295 && (! WORDS_BIG_ENDIAN
3296 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3297 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3298
3299 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3300 since we are saying that the high bits don't matter. */
3301 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3302 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3303 return SUBREG_REG (x);
3304
3305 /* Note that we cannot do any narrowing for non-constants since
3306 we might have been counting on using the fact that some bits were
3307 zero. We now do this in the SET. */
3308
3309 break;
3310
3311 case NOT:
3312 /* (not (plus X -1)) can become (neg X). */
3313 if (GET_CODE (XEXP (x, 0)) == PLUS
3314 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3315 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3316
3317 /* Similarly, (not (neg X)) is (plus X -1). */
3318 if (GET_CODE (XEXP (x, 0)) == NEG)
3319 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3320 constm1_rtx);
3321
3322 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3323 if (GET_CODE (XEXP (x, 0)) == XOR
3324 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3325 && (temp = simplify_unary_operation (NOT, mode,
3326 XEXP (XEXP (x, 0), 1),
3327 mode)) != 0)
3328 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3329
3330 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3331 other than 1, but that is not valid. We could do a similar
3332 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3333 but this doesn't seem common enough to bother with. */
3334 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3335 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3594 && (! WORDS_BIG_ENDIAN
3595 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3596 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3597
3598 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3599 since we are saying that the high bits don't matter. */
3600 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3601 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3602 return SUBREG_REG (x);
3603
3604 /* Note that we cannot do any narrowing for non-constants since
3605 we might have been counting on using the fact that some bits were
3606 zero. We now do this in the SET. */
3607
3608 break;
3609
3610 case NOT:
3611 /* (not (plus X -1)) can become (neg X). */
3612 if (GET_CODE (XEXP (x, 0)) == PLUS
3613 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3614 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3615
3616 /* Similarly, (not (neg X)) is (plus X -1). */
3617 if (GET_CODE (XEXP (x, 0)) == NEG)
3618 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3619 constm1_rtx);
3620
3621 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3622 if (GET_CODE (XEXP (x, 0)) == XOR
3623 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3624 && (temp = simplify_unary_operation (NOT, mode,
3625 XEXP (XEXP (x, 0), 1),
3626 mode)) != 0)
3627 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3628
3629 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3630 other than 1, but that is not valid. We could do a similar
3631 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3632 but this doesn't seem common enough to bother with. */
3633 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3634 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3336 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3337 XEXP (XEXP (x, 0), 1));
3635 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3636 XEXP (XEXP (x, 0), 1));
3338
3339 if (GET_CODE (XEXP (x, 0)) == SUBREG
3340 && subreg_lowpart_p (XEXP (x, 0))
3341 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3342 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3343 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3344 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3345 {
3346 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3347
3637
3638 if (GET_CODE (XEXP (x, 0)) == SUBREG
3639 && subreg_lowpart_p (XEXP (x, 0))
3640 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3641 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3642 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3643 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3644 {
3645 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3646
3348 x = gen_rtx (ROTATE, inner_mode,
3349 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3350 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3647 x = gen_rtx_ROTATE (inner_mode,
3648 gen_unary (NOT, inner_mode, inner_mode,
3649 const1_rtx),
3650 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3351 return gen_lowpart_for_combine (mode, x);
3352 }
3353
3651 return gen_lowpart_for_combine (mode, x);
3652 }
3653
3354#if STORE_FLAG_VALUE == -1
3355 /* (not (comparison foo bar)) can be done by reversing the comparison
3356 code if valid. */
3357 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3654 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3655 reversing the comparison code if valid. */
3656 if (STORE_FLAG_VALUE == -1
3657 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3358 && reversible_comparison_p (XEXP (x, 0)))
3359 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3360 mode, XEXP (XEXP (x, 0), 0),
3361 XEXP (XEXP (x, 0), 1));
3362
3363 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3658 && reversible_comparison_p (XEXP (x, 0)))
3659 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3660 mode, XEXP (XEXP (x, 0), 0),
3661 XEXP (XEXP (x, 0), 1));
3662
3663 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3364 is (lt foo (const_int 0)), so we can perform the above
3365 simplification. */
3664 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3665 perform the above simplification. */
3366
3666
3367 if (XEXP (x, 1) == const1_rtx
3667 if (STORE_FLAG_VALUE == -1
3668 && XEXP (x, 1) == const1_rtx
3368 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3369 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3370 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3371 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3669 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3670 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3671 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3672 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3372#endif
3373
3374 /* Apply De Morgan's laws to reduce number of patterns for machines
3375 with negating logical insns (and-not, nand, etc.). If result has
3376 only one NOT, put it first, since that is how the patterns are
3377 coded. */
3378
3379 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3380 {
3381 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3382
3383 if (GET_CODE (in1) == NOT)
3384 in1 = XEXP (in1, 0);
3385 else
3386 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3387
3388 if (GET_CODE (in2) == NOT)
3389 in2 = XEXP (in2, 0);
3390 else if (GET_CODE (in2) == CONST_INT
3391 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3392 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3393 else
3394 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3395
3396 if (GET_CODE (in2) == NOT)
3397 {
3398 rtx tem = in2;
3399 in2 = in1; in1 = tem;
3400 }
3401
3402 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3403 mode, in1, in2);
3404 }
3405 break;
3406
3407 case NEG:
3408 /* (neg (plus X 1)) can become (not X). */
3409 if (GET_CODE (XEXP (x, 0)) == PLUS
3410 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3411 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3412
3413 /* Similarly, (neg (not X)) is (plus X 1). */
3414 if (GET_CODE (XEXP (x, 0)) == NOT)
3415 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3416
3417 /* (neg (minus X Y)) can become (minus Y X). */
3418 if (GET_CODE (XEXP (x, 0)) == MINUS
3419 && (! FLOAT_MODE_P (mode)
3673
3674 /* Apply De Morgan's laws to reduce number of patterns for machines
3675 with negating logical insns (and-not, nand, etc.). If result has
3676 only one NOT, put it first, since that is how the patterns are
3677 coded. */
3678
3679 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3680 {
3681 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3682
3683 if (GET_CODE (in1) == NOT)
3684 in1 = XEXP (in1, 0);
3685 else
3686 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3687
3688 if (GET_CODE (in2) == NOT)
3689 in2 = XEXP (in2, 0);
3690 else if (GET_CODE (in2) == CONST_INT
3691 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3692 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3693 else
3694 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3695
3696 if (GET_CODE (in2) == NOT)
3697 {
3698 rtx tem = in2;
3699 in2 = in1; in1 = tem;
3700 }
3701
3702 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3703 mode, in1, in2);
3704 }
3705 break;
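/* Editor's note -- illustrative sketch, not part of combine.c.  The
   rewrite above is De Morgan's laws; pushing the NOT inward lets a
   machine with and-not or nand patterns match the result.  */
static int
de_morgan_holds (unsigned a, unsigned b)
{
  return ~(a | b) == (~a & ~b)  /* (not (ior a b)) -> (and (not a) (not b)) */
      && ~(a & b) == (~a | ~b); /* (not (and a b)) -> (ior (not a) (not b)) */
}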
3706
3707 case NEG:
3708 /* (neg (plus X 1)) can become (not X). */
3709 if (GET_CODE (XEXP (x, 0)) == PLUS
3710 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3711 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3712
3713 /* Similarly, (neg (not X)) is (plus X 1). */
3714 if (GET_CODE (XEXP (x, 0)) == NOT)
3715 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3716
3717 /* (neg (minus X Y)) can become (minus Y X). */
3718 if (GET_CODE (XEXP (x, 0)) == MINUS
3719 && (! FLOAT_MODE_P (mode)
3420 /* x-y != -(y-x) with IEEE floating point. */
3720 /* x-y != -(y-x) with IEEE floating point. */
3421 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3422 || flag_fast_math))
3423 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3424 XEXP (XEXP (x, 0), 0));
3425
3721 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3722 || flag_fast_math))
3723 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3724 XEXP (XEXP (x, 0), 0));
3725
3426 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3726 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3427 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3428 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3429 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3430
3431 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3432 if we can then eliminate the NEG (e.g.,
3433 if the operand is a constant). */
3434
3435 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3436 {
3437 temp = simplify_unary_operation (NEG, mode,
3438 XEXP (XEXP (x, 0), 0), mode);
3439 if (temp)
3440 {
3441 SUBST (XEXP (XEXP (x, 0), 0), temp);
3442 return XEXP (x, 0);
3443 }
3444 }
3445
3446 temp = expand_compound_operation (XEXP (x, 0));
3447
3448 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3449 replaced by (lshiftrt X C). This will convert
3450 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3451
3452 if (GET_CODE (temp) == ASHIFTRT
3453 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3454 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3455 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3456 INTVAL (XEXP (temp, 1)));
3457
3458 /* If X has only a single bit that might be nonzero, say, bit I, convert
3459 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3460 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3461 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3462 or a SUBREG of one since we'd be making the expression more
3463 complex if it was just a register. */
3464
3465 if (GET_CODE (temp) != REG
3466 && ! (GET_CODE (temp) == SUBREG
3467 && GET_CODE (SUBREG_REG (temp)) == REG)
3468 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3469 {
3470 rtx temp1 = simplify_shift_const
3471 (NULL_RTX, ASHIFTRT, mode,
3472 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3473 GET_MODE_BITSIZE (mode) - 1 - i),
3474 GET_MODE_BITSIZE (mode) - 1 - i);
3475
3476 /* If all we did was surround TEMP with the two shifts, we
3477 haven't improved anything, so don't use it. Otherwise,
3478 we are better off with TEMP1. */
3479 if (GET_CODE (temp1) != ASHIFTRT
3480 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3481 || XEXP (XEXP (temp1, 0), 0) != temp)
3482 return temp1;
3483 }
3484 break;
3485
3486 case TRUNCATE:
3727 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3728 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3729 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3730
3731 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3732 if we can then eliminate the NEG (e.g.,
3733 if the operand is a constant). */
3734
3735 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3736 {
3737 temp = simplify_unary_operation (NEG, mode,
3738 XEXP (XEXP (x, 0), 0), mode);
3739 if (temp)
3740 {
3741 SUBST (XEXP (XEXP (x, 0), 0), temp);
3742 return XEXP (x, 0);
3743 }
3744 }
3745
3746 temp = expand_compound_operation (XEXP (x, 0));
3747
3748 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3749 replaced by (lshiftrt X C). This will convert
3750 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3751
3752 if (GET_CODE (temp) == ASHIFTRT
3753 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3754 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3755 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3756 INTVAL (XEXP (temp, 1)));
3757
3758 /* If X has only a single bit that might be nonzero, say, bit I, convert
3759 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3760 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3761 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3762 or a SUBREG of one since we'd be making the expression more
3763 complex if it was just a register. */
3764
3765 if (GET_CODE (temp) != REG
3766 && ! (GET_CODE (temp) == SUBREG
3767 && GET_CODE (SUBREG_REG (temp)) == REG)
3768 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3769 {
3770 rtx temp1 = simplify_shift_const
3771 (NULL_RTX, ASHIFTRT, mode,
3772 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3773 GET_MODE_BITSIZE (mode) - 1 - i),
3774 GET_MODE_BITSIZE (mode) - 1 - i);
3775
3776 /* If all we did was surround TEMP with the two shifts, we
3777 haven't improved anything, so don't use it. Otherwise,
3778 we are better off with TEMP1. */
3779 if (GET_CODE (temp1) != ASHIFTRT
3780 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3781 || XEXP (XEXP (temp1, 0), 0) != temp)
3782 return temp1;
3783 }
3784 break;
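/* Editor's note -- illustrative sketch, not part of combine.c.  When
   only bit I of X can be nonzero, the NEG rewrite above becomes a left
   shift followed by an arithmetic right shift.  For bit 0 of a 32-bit
   int (so X is 0 or 1), assuming two's complement and an arithmetic >>
   on int:  */
static int
neg_of_single_bit (int x)       /* x is known to be 0 or 1 */
{
  /* (ashiftrt (ashift x 31) 31): 0 stays 0, 1 becomes -1.  */
  int shifted = (int) ((unsigned) x << 31) >> 31;

  return shifted == -x;
}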
3785
3786 case TRUNCATE:
3487 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3787 /* We can't handle truncation to a partial integer mode here
3788 because we don't know the real bitsize of the partial
3789 integer mode. */
3790 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3791 break;
3792
3793 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3794 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3795 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3488 SUBST (XEXP (x, 0),
3489 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3490 GET_MODE_MASK (mode), NULL_RTX, 0));
3796 SUBST (XEXP (x, 0),
3797 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3798 GET_MODE_MASK (mode), NULL_RTX, 0));
3799
3800 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3801 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3802 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3803 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3804 return XEXP (XEXP (x, 0), 0);
3805
3806 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3807 (OP:SI foo:SI) if OP is NEG or ABS. */
3808 if ((GET_CODE (XEXP (x, 0)) == ABS
3809 || GET_CODE (XEXP (x, 0)) == NEG)
3810 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3811 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3812 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3813 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3814 XEXP (XEXP (XEXP (x, 0), 0), 0));
3815
3816 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3817 (truncate:SI x). */
3818 if (GET_CODE (XEXP (x, 0)) == SUBREG
3819 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3820 && subreg_lowpart_p (XEXP (x, 0)))
3821 return SUBREG_REG (XEXP (x, 0));
3822
3823 /* If we know that the value is already truncated, we can
3824 replace the TRUNCATE with a SUBREG. */
3825 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3826 >= GET_MODE_BITSIZE (mode) + 1)
3827 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3828
3829 /* A truncate of a comparison can be replaced with a subreg if
3830 STORE_FLAG_VALUE permits. This is like the previous test,
3831 but it works even if the comparison is done in a mode larger
3832 than HOST_BITS_PER_WIDE_INT. */
3833 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3834 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3835 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3836 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3837
3838 /* Similarly, a truncate of a register whose value is a
3839 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3840 permits. */
3841 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3842 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3843 && (temp = get_last_value (XEXP (x, 0)))
3844 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3845 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3846
3491 break;
3492
3493 case FLOAT_TRUNCATE:
3494 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3495 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3496 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3497 return XEXP (XEXP (x, 0), 0);
3498
3499	 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
3500 (OP:SF foo:SF) if OP is NEG or ABS. */
3501 if ((GET_CODE (XEXP (x, 0)) == ABS
3502 || GET_CODE (XEXP (x, 0)) == NEG)
3503 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3504 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3505 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3506 XEXP (XEXP (XEXP (x, 0), 0), 0));
3507
3508 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3509 is (float_truncate:SF x). */
3510 if (GET_CODE (XEXP (x, 0)) == SUBREG
3511 && subreg_lowpart_p (XEXP (x, 0))
3512 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3513 return SUBREG_REG (XEXP (x, 0));
3514 break;
3515
3516#ifdef HAVE_cc0
3517 case COMPARE:
3518 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3519 using cc0, in which case we want to leave it as a COMPARE
3520 so we can distinguish it from a register-register-copy. */
3521 if (XEXP (x, 1) == const0_rtx)
3522 return XEXP (x, 0);
3523
3524 /* In IEEE floating point, x-0 is not the same as x. */
3525 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3526 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3527 || flag_fast_math)
3528 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3529 return XEXP (x, 0);
3530 break;
3531#endif
3532
3533 case CONST:
3534 /* (const (const X)) can become (const X). Do it this way rather than
3535 returning the inner CONST since CONST can be shared with a
3536 REG_EQUAL note. */
3537 if (GET_CODE (XEXP (x, 0)) == CONST)
3538 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3539 break;
3540
3541#ifdef HAVE_lo_sum
3542 case LO_SUM:
3543 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3544 can add in an offset. find_split_point will split this address up
3545 again if it doesn't match. */
3546 if (GET_CODE (XEXP (x, 0)) == HIGH
3547 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3548 return XEXP (x, 1);
3549 break;
3550#endif
3551
3552 case PLUS:
3553 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3554 outermost. That's because that's the way indexed addresses are
3555 supposed to appear. This code used to check many more cases, but
3556 they are now checked elsewhere. */
3557 if (GET_CODE (XEXP (x, 0)) == PLUS
3558 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3559 return gen_binary (PLUS, mode,
3560 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3561 XEXP (x, 1)),
3562 XEXP (XEXP (x, 0), 1));
3563
3564 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3565 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3566 bit-field and can be replaced by either a sign_extend or a
3567 sign_extract. The `and' may be a zero_extend. */
3568 if (GET_CODE (XEXP (x, 0)) == XOR
3569 && GET_CODE (XEXP (x, 1)) == CONST_INT
3570 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3571 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3572 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3573 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3574 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3575 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3576 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3577 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3578 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3579 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3580 == i + 1))))
3581 return simplify_shift_const
3582 (NULL_RTX, ASHIFTRT, mode,
3583 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3584 XEXP (XEXP (XEXP (x, 0), 0), 0),
3585 GET_MODE_BITSIZE (mode) - (i + 1)),
3586 GET_MODE_BITSIZE (mode) - (i + 1));
3587
3588 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3589 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3590 is 1. This produces better code than the alternative immediately
3591 below. */
3592 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3593 && reversible_comparison_p (XEXP (x, 0))
3594 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3595 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3596 return
3597 gen_unary (NEG, mode, mode,
3598 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3599 mode, XEXP (XEXP (x, 0), 0),
3600 XEXP (XEXP (x, 0), 1)));
3601
3602 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3603 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3604 the bitsize of the mode - 1. This allows simplification of
3605 "a = (b & 8) == 0;" */
3606 if (XEXP (x, 1) == constm1_rtx
3607 && GET_CODE (XEXP (x, 0)) != REG
3608 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3609 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3610 && nonzero_bits (XEXP (x, 0), mode) == 1)
3611 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3612 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3613 gen_rtx_combine (XOR, mode,
3614 XEXP (x, 0), const1_rtx),
3615 GET_MODE_BITSIZE (mode) - 1),
3616 GET_MODE_BITSIZE (mode) - 1);
3617
3618 /* If we are adding two things that have no bits in common, convert
3619 the addition into an IOR. This will often be further simplified,
3620 for example in cases like ((a & 1) + (a & 2)), which can
3621 become a & 3. */
3622
3623 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3624 && (nonzero_bits (XEXP (x, 0), mode)
3625 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3626 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3627 break;
3628
3629 case MINUS:
3847 break;
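/* Editor's note -- illustrative sketch, not part of combine.c.  Two of
   the TRUNCATE rules above at the source level, assuming 32-bit int,
   64-bit long and 16-bit short: truncating an extension of the same
   mode is a no-op, and truncating a value already known to fit is
   redundant.  */
static int
truncate_examples (int foo)
{
  long widened = (long) foo;             /* (sign_extend:DI foo:SI)     */
  int t1 = (int) widened;                /* (truncate:SI ...) == foo:SI */
  short narrow = (short) foo;            /* already-truncated value...  */
  int t2 = (short) (int) narrow;         /* ...redundant truncate drops */

  return t1 == foo && t2 == narrow;
}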
3848
3849 case FLOAT_TRUNCATE:
3850 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3851 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3852 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3853 return XEXP (XEXP (x, 0), 0);
3854
3855	 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
3856 (OP:SF foo:SF) if OP is NEG or ABS. */
3857 if ((GET_CODE (XEXP (x, 0)) == ABS
3858 || GET_CODE (XEXP (x, 0)) == NEG)
3859 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3860 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3861 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3862 XEXP (XEXP (XEXP (x, 0), 0), 0));
3863
3864 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3865 is (float_truncate:SF x). */
3866 if (GET_CODE (XEXP (x, 0)) == SUBREG
3867 && subreg_lowpart_p (XEXP (x, 0))
3868 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3869 return SUBREG_REG (XEXP (x, 0));
3870 break;
3871
3872#ifdef HAVE_cc0
3873 case COMPARE:
3874 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3875 using cc0, in which case we want to leave it as a COMPARE
3876 so we can distinguish it from a register-register-copy. */
3877 if (XEXP (x, 1) == const0_rtx)
3878 return XEXP (x, 0);
3879
3880 /* In IEEE floating point, x-0 is not the same as x. */
3881 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3882 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3883 || flag_fast_math)
3884 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3885 return XEXP (x, 0);
3886 break;
3887#endif
3888
3889 case CONST:
3890 /* (const (const X)) can become (const X). Do it this way rather than
3891 returning the inner CONST since CONST can be shared with a
3892 REG_EQUAL note. */
3893 if (GET_CODE (XEXP (x, 0)) == CONST)
3894 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3895 break;
3896
3897#ifdef HAVE_lo_sum
3898 case LO_SUM:
3899 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3900 can add in an offset. find_split_point will split this address up
3901 again if it doesn't match. */
3902 if (GET_CODE (XEXP (x, 0)) == HIGH
3903 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3904 return XEXP (x, 1);
3905 break;
3906#endif
3907
3908 case PLUS:
3909 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3910 outermost. That's because that's the way indexed addresses are
3911 supposed to appear. This code used to check many more cases, but
3912 they are now checked elsewhere. */
3913 if (GET_CODE (XEXP (x, 0)) == PLUS
3914 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3915 return gen_binary (PLUS, mode,
3916 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3917 XEXP (x, 1)),
3918 XEXP (XEXP (x, 0), 1));
3919
3920 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3921 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3922 bit-field and can be replaced by either a sign_extend or a
3923 sign_extract. The `and' may be a zero_extend. */
3924 if (GET_CODE (XEXP (x, 0)) == XOR
3925 && GET_CODE (XEXP (x, 1)) == CONST_INT
3926 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3927 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3928 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3929 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3930 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3931 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3932 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3933 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3934 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3935 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3936 == i + 1))))
3937 return simplify_shift_const
3938 (NULL_RTX, ASHIFTRT, mode,
3939 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3940 XEXP (XEXP (XEXP (x, 0), 0), 0),
3941 GET_MODE_BITSIZE (mode) - (i + 1)),
3942 GET_MODE_BITSIZE (mode) - (i + 1));
3943
3944 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3945 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3946 is 1. This produces better code than the alternative immediately
3947 below. */
3948 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3949 && reversible_comparison_p (XEXP (x, 0))
3950 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3951 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3952 return
3953 gen_unary (NEG, mode, mode,
3954 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3955 mode, XEXP (XEXP (x, 0), 0),
3956 XEXP (XEXP (x, 0), 1)));
3957
3958 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3959 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3960 the bitsize of the mode - 1. This allows simplification of
3961 "a = (b & 8) == 0;" */
3962 if (XEXP (x, 1) == constm1_rtx
3963 && GET_CODE (XEXP (x, 0)) != REG
3964 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3965 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3966 && nonzero_bits (XEXP (x, 0), mode) == 1)
3967 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3968 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3969 gen_rtx_combine (XOR, mode,
3970 XEXP (x, 0), const1_rtx),
3971 GET_MODE_BITSIZE (mode) - 1),
3972 GET_MODE_BITSIZE (mode) - 1);
3973
3974 /* If we are adding two things that have no bits in common, convert
3975 the addition into an IOR. This will often be further simplified,
3976 for example in cases like ((a & 1) + (a & 2)), which can
3977 become a & 3. */
3978
3979 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3980 && (nonzero_bits (XEXP (x, 0), mode)
3981 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3982 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3983 break;
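/* Editor's note -- illustrative sketch, not part of combine.c.  Two of
   the PLUS rewrites above in C (two's complement assumed): the
   xor/plus pattern is a branch-free sign extension of a bit-field, and
   adding values with no nonzero bits in common is the same as OR-ing
   them.  */
static int
plus_examples (int x, int a)
{
  int sext = ((x & 0xff) ^ 0x80) - 0x80; /* sign-extend low 8-bit field */
  int sext2 = (int) (signed char) (x & 0xff);
  int sum = (a & 1) + (a & 2);           /* disjoint nonzero bits...    */
  int ior = (a & 1) | (a & 2);           /* ...so PLUS equals IOR (a&3) */

  return sext == sext2 && sum == ior;
}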
3984
3985 case MINUS:
3630#if STORE_FLAG_VALUE == 1
3631 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3632 code if valid. */
3633 if (XEXP (x, 0) == const1_rtx
3986 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3987 by reversing the comparison code if valid. */
3988 if (STORE_FLAG_VALUE == 1
3989 && XEXP (x, 0) == const1_rtx
3634 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3635 && reversible_comparison_p (XEXP (x, 1)))
3636 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3637 mode, XEXP (XEXP (x, 1), 0),
3638 XEXP (XEXP (x, 1), 1));
3990 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3991 && reversible_comparison_p (XEXP (x, 1)))
3992 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3993 mode, XEXP (XEXP (x, 1), 0),
3994 XEXP (XEXP (x, 1), 1));
3639#endif
3640
3641 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3642 (and <foo> (const_int pow2-1)) */
3643 if (GET_CODE (XEXP (x, 1)) == AND
3644 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3645 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3646 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3647 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3648 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3649
3650 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3651 integers. */
3652 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3653 return gen_binary (MINUS, mode,
3654 gen_binary (MINUS, mode, XEXP (x, 0),
3655 XEXP (XEXP (x, 1), 0)),
3656 XEXP (XEXP (x, 1), 1));
3657 break;
3658
3659 case MULT:
3660 /* If we have (mult (plus A B) C), apply the distributive law and then
3661 the inverse distributive law to see if things simplify. This
3662 occurs mostly in addresses, often when unrolling loops. */
3663
3664 if (GET_CODE (XEXP (x, 0)) == PLUS)
3665 {
3666 x = apply_distributive_law
3667 (gen_binary (PLUS, mode,
3668 gen_binary (MULT, mode,
3669 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3670 gen_binary (MULT, mode,
3671 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3672
3673 if (GET_CODE (x) != MULT)
3674 return x;
3675 }
3676 break;
3677
3678 case UDIV:
3679 /* If this is a divide by a power of two, treat it as a shift if
3680 its first operand is a shift. */
3681 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3682 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3683 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3684 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3685 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3686 || GET_CODE (XEXP (x, 0)) == ROTATE
3687 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3688 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
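/* Editor's illustrative sketch (not part of combine.c): unsigned division
   by 2**i is a logical right shift by i, which is why treating the UDIV
   above as an LSHIFTRT is safe.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  unsigned x;
  for (x = 0; x < 100; x++)
    assert (x / 8 == x >> 3);
  return 0;
}
#endif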
3689 break;
3690
3691 case EQ: case NE:
3692 case GT: case GTU: case GE: case GEU:
3693 case LT: case LTU: case LE: case LEU:
3694 /* If the first operand is a condition code, we can't do anything
3695 with it. */
3696 if (GET_CODE (XEXP (x, 0)) == COMPARE
3697 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3698#ifdef HAVE_cc0
3699 && XEXP (x, 0) != cc0_rtx
3700#endif
3701 ))
3702 {
3703 rtx op0 = XEXP (x, 0);
3704 rtx op1 = XEXP (x, 1);
3705 enum rtx_code new_code;
3706
3707 if (GET_CODE (op0) == COMPARE)
3708 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3709
3710 /* Simplify our comparison, if possible. */
3711 new_code = simplify_comparison (code, &op0, &op1);
3712
3713#if STORE_FLAG_VALUE == 1
3714 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3715 if only the low-order bit is possibly nonzero in X (such as when
3716 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3717 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3718 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3719 (plus X 1).
3720
3721 Remove any ZERO_EXTRACT we made when thinking this was a
3722 comparison. It may now be simpler to use, e.g., an AND. If a
3723 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3724 the call to make_compound_operation in the SET case. */
3725
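/* Editor's illustrative sketch (not part of combine.c): with
   STORE_FLAG_VALUE == 1, the four conversions described above hold for
   the only values X can take in each case.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int x;
  for (x = 0; x <= 1; x++)		/* only the low bit can be nonzero */
    {
      assert ((x != 0) == x);		/* (ne x 0) -> x */
      assert ((x == 0) == (x ^ 1));	/* (eq x 0) -> (xor x 1) */
    }
  for (x = -1; x <= 0; x++)		/* x known to be 0 or -1 */
    {
      assert ((x != 0) == -x);		/* (ne x 0) -> (neg x) */
      assert ((x == 0) == x + 1);	/* (eq x 0) -> (plus x 1) */
    }
  return 0;
}
#endif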
3726 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3727 && op1 == const0_rtx
3728 && nonzero_bits (op0, mode) == 1)
4080 if (STORE_FLAG_VALUE == 1
4081 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4082 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
3729 return gen_lowpart_for_combine (mode,
3730 expand_compound_operation (op0));
3731
3732 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4086 else if (STORE_FLAG_VALUE == 1
4087 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3733 && op1 == const0_rtx
3734 && (num_sign_bit_copies (op0, mode)
3735 == GET_MODE_BITSIZE (mode)))
3736 {
3737 op0 = expand_compound_operation (op0);
3738 return gen_unary (NEG, mode, mode,
3739 gen_lowpart_for_combine (mode, op0));
3740 }
3741
3742 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4097 else if (STORE_FLAG_VALUE == 1
4098 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3743 && op1 == const0_rtx
3744 && nonzero_bits (op0, mode) == 1)
3745 {
3746 op0 = expand_compound_operation (op0);
3747 return gen_binary (XOR, mode,
3748 gen_lowpart_for_combine (mode, op0),
3749 const1_rtx);
3750 }
3751
3752 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4108 else if (STORE_FLAG_VALUE == 1
4109 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3753 && op1 == const0_rtx
3754 && (num_sign_bit_copies (op0, mode)
3755 == GET_MODE_BITSIZE (mode)))
3756 {
3757 op0 = expand_compound_operation (op0);
3758 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3759 }
3760#endif
3761
3762#if STORE_FLAG_VALUE == -1
3763 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3764 those above. */
3765 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4120 if (STORE_FLAG_VALUE == -1
4121 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3766 && op1 == const0_rtx
3767 && (num_sign_bit_copies (op0, mode)
3768 == GET_MODE_BITSIZE (mode)))
3769 return gen_lowpart_for_combine (mode,
3770 expand_compound_operation (op0));
3771
3772 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4128 else if (STORE_FLAG_VALUE == -1
4129 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3773 && op1 == const0_rtx
3774 && nonzero_bits (op0, mode) == 1)
3775 {
3776 op0 = expand_compound_operation (op0);
3777 return gen_unary (NEG, mode, mode,
3778 gen_lowpart_for_combine (mode, op0));
3779 }
3780
3781 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4138 else if (STORE_FLAG_VALUE == -1
4139 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3782 && op1 == const0_rtx
3783 && (num_sign_bit_copies (op0, mode)
3784 == GET_MODE_BITSIZE (mode)))
3785 {
3786 op0 = expand_compound_operation (op0);
3787 return gen_unary (NOT, mode, mode,
3788 gen_lowpart_for_combine (mode, op0));
3789 }
3790
3791 /* If X is 0/1, (eq X 0) is X-1. */
3792 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4150 else if (STORE_FLAG_VALUE == -1
4151 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3793 && op1 == const0_rtx
3794 && nonzero_bits (op0, mode) == 1)
3795 {
3796 op0 = expand_compound_operation (op0);
3797 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3798 }
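/* Editor's illustrative sketch (not part of combine.c): C comparisons
   yield 0/1, so a target whose STORE_FLAG_VALUE is -1 is modelled here
   by negating them; the "(eq X 0) is X-1" rewrite above then checks out
   for X in {0, 1}.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int x;
  for (x = 0; x <= 1; x++)
    assert (-(x == 0) == x - 1);
  return 0;
}
#endif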
3799#endif
3800
3801 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3802 one bit that might be nonzero, we can convert (ne x 0) to
3803 (ashift x c) where C puts the bit in the sign bit. Remove any
3804 AND with STORE_FLAG_VALUE when we are done, since we are only
3805 going to test the sign bit. */
3806 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3807 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3808 && (STORE_FLAG_VALUE
4166 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
3809 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3810 && op1 == const0_rtx
3811 && mode == GET_MODE (op0)
3812 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3813 {
3814 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3815 expand_compound_operation (op0),
3816 GET_MODE_BITSIZE (mode) - 1 - i);
3817 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3818 return XEXP (x, 0);
3819 else
3820 return x;
3821 }
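/* Editor's illustrative sketch (not part of combine.c): on a target whose
   STORE_FLAG_VALUE is the sign bit, a value with a single possibly-nonzero
   bit at position I becomes the flag by shifting that bit left into the
   sign bit.  Unsigned arithmetic keeps the C check well defined.  */
#if 0 /* standalone check */
#include <assert.h>
#include <stdint.h>
int
main (void)
{
  uint32_t x;
  for (x = 0; x <= 8; x += 8)		/* only bit 3 can be nonzero */
    assert ((x << (31 - 3)) == (x != 0 ? 0x80000000u : 0u));
  return 0;
}
#endif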
3822
3823 /* If the code changed, return a whole new comparison. */
3824 if (new_code != code)
3825 return gen_rtx_combine (new_code, mode, op0, op1);
3826
3827 /* Otherwise, keep this operation, but maybe change its operands.
3828 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3829 SUBST (XEXP (x, 0), op0);
3830 SUBST (XEXP (x, 1), op1);
3831 }
3832 break;
3833
3834 case IF_THEN_ELSE:
3835 return simplify_if_then_else (x);
3836
3837 case ZERO_EXTRACT:
3838 case SIGN_EXTRACT:
3839 case ZERO_EXTEND:
3840 case SIGN_EXTEND:
3841 /* If we are processing SET_DEST, we are done. */
3842 if (in_dest)
3843 return x;
3844
3845 return expand_compound_operation (x);
3846
3847 case SET:
3848 return simplify_set (x);
3849
3850 case AND:
3851 case IOR:
3852 case XOR:
3853 return simplify_logical (x, last);
3854
3855 case ABS:
3856 /* (abs (neg <foo>)) -> (abs <foo>) */
3857 if (GET_CODE (XEXP (x, 0)) == NEG)
3858 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3859
4218 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4219 do nothing. */
4220 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4221 break;
4222
3860 /* If operand is something known to be positive, ignore the ABS. */
3861 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3862 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3863 <= HOST_BITS_PER_WIDE_INT)
3864 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3865 & ((HOST_WIDE_INT) 1
3866 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3867 == 0)))
3868 return XEXP (x, 0);
3869
3870
3871 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3872 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3873 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
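/* Editor's illustrative sketch (not part of combine.c): the two ABS
   rewrites above -- drop ABS of a value whose sign bit is known clear,
   and turn ABS of a 0/-1 value into NEG.  */
#if 0 /* standalone check */
#include <assert.h>
#include <stdlib.h>
int
main (void)
{
  int x;
  for (x = 0; x < 8; x++)		/* sign bit known to be zero */
    assert (abs (x) == x);
  for (x = -1; x <= 0; x++)		/* known to be 0 or -1 */
    assert (abs (x) == -x);
  return 0;
}
#endif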
3874
3875 break;
3876
3877 case FFS:
3878 /* (ffs (*_extend <X>)) = (ffs <X>) */
3879 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3880 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3881 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3882 break;
3883
3884 case FLOAT:
3885 /* (float (sign_extend <X>)) = (float <X>). */
3886 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3887 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3888 break;
3889
3890 case ASHIFT:
3891 case LSHIFTRT:
3892 case ASHIFTRT:
3893 case ROTATE:
3894 case ROTATERT:
3895 /* If this is a shift by a constant amount, simplify it. */
3896 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3897 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3898 INTVAL (XEXP (x, 1)));
3899
3900#ifdef SHIFT_COUNT_TRUNCATED
3901 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3902 SUBST (XEXP (x, 1),
3903 force_to_mode (XEXP (x, 1), GET_MODE (x),
3904 ((HOST_WIDE_INT) 1
3905 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3906 - 1,
3907 NULL_RTX, 0));
3908#endif
3909
3910 break;
4274
4275 default:
4276 break;
3911 }
3912
3913 return x;
3914}
3915
3916/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3917
3918static rtx
3919simplify_if_then_else (x)
3920 rtx x;
3921{
3922 enum machine_mode mode = GET_MODE (x);
3923 rtx cond = XEXP (x, 0);
3924 rtx true = XEXP (x, 1);
3925 rtx false = XEXP (x, 2);
3926 enum rtx_code true_code = GET_CODE (cond);
3927 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3928 rtx temp;
3929 int i;
3930
3931 /* Simplify storing of the truth value. */
3932 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3933 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3934
3935 /* Also when the truth value has to be reversed. */
3936 if (comparison_p && reversible_comparison_p (cond)
3937 && true == const0_rtx && false == const_true_rtx)
3938 return gen_binary (reverse_condition (true_code),
3939 mode, XEXP (cond, 0), XEXP (cond, 1));
3940
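/* Editor's illustrative sketch (not part of combine.c): with
   STORE_FLAG_VALUE == 1, storing a truth value is the comparison itself,
   and the reversed form is the reversed comparison.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int a = 3, b = 5;
  assert ((a < b ? 1 : 0) == (a < b));	/* (if_then_else (lt a b) 1 0) */
  assert ((a < b ? 0 : 1) == (a >= b));	/* reversed condition */
  return 0;
}
#endif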
3941 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3942 in it is being compared against certain values. Get the true and false
3943 comparisons and see if that says anything about the value of each arm. */
3944
3945 if (comparison_p && reversible_comparison_p (cond)
3946 && GET_CODE (XEXP (cond, 0)) == REG)
3947 {
3948 HOST_WIDE_INT nzb;
3949 rtx from = XEXP (cond, 0);
3950 enum rtx_code false_code = reverse_condition (true_code);
3951 rtx true_val = XEXP (cond, 1);
3952 rtx false_val = true_val;
3953 int swapped = 0;
3954
3955 /* If FALSE_CODE is EQ, swap the codes and arms. */
3956
3957 if (false_code == EQ)
3958 {
3959 swapped = 1, true_code = EQ, false_code = NE;
3960 temp = true, true = false, false = temp;
3961 }
3962
3963 /* If we are comparing against zero and the expression being tested has
3964 only a single bit that might be nonzero, that is its value when it is
3965 not equal to zero. Similarly if it is known to be -1 or 0. */
3966
3967 if (true_code == EQ && true_val == const0_rtx
3968 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3969 false_code = EQ, false_val = GEN_INT (nzb);
3970 else if (true_code == EQ && true_val == const0_rtx
3971 && (num_sign_bit_copies (from, GET_MODE (from))
3972 == GET_MODE_BITSIZE (GET_MODE (from))))
3973 false_code = EQ, false_val = constm1_rtx;
3974
3975 /* Now simplify an arm if we know the value of the register in the
3976 branch and it is used in the arm. Be careful due to the potential
3977 of locally-shared RTL. */
3978
3979 if (reg_mentioned_p (from, true))
3980 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3981 pc_rtx, pc_rtx, 0, 0);
3982 if (reg_mentioned_p (from, false))
3983 false = subst (known_cond (copy_rtx (false), false_code,
3984 from, false_val),
3985 pc_rtx, pc_rtx, 0, 0);
3986
3987 SUBST (XEXP (x, 1), swapped ? false : true);
3988 SUBST (XEXP (x, 2), swapped ? true : false);
3989
3990 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3991 }
3992
3993 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3994 reversed, do so to avoid needing two sets of patterns for
3995 subtract-and-branch insns. Similarly if we have a constant in the true
3996 arm, the false arm is the same as the first operand of the comparison, or
3997 the false arm is more complicated than the true arm. */
3998
3999 if (comparison_p && reversible_comparison_p (cond)
4000 && (true == pc_rtx
4001 || (CONSTANT_P (true)
4002 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4003 || true == const0_rtx
4004 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4005 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4006 || (GET_CODE (true) == SUBREG
4007 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4008 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4009 || reg_mentioned_p (true, false)
4010 || rtx_equal_p (false, XEXP (cond, 0))))
4011 {
4012 true_code = reverse_condition (true_code);
4013 SUBST (XEXP (x, 0),
4014 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4015 XEXP (cond, 1)));
4016
4017 SUBST (XEXP (x, 1), false);
4018 SUBST (XEXP (x, 2), true);
4019
4020 temp = true, true = false, false = temp, cond = XEXP (x, 0);
4387
4388 /* It is possible that the conditional has been simplified out. */
4389 true_code = GET_CODE (cond);
4390 comparison_p = GET_RTX_CLASS (true_code) == '<';
4021 }
4022
4023 /* If the two arms are identical, we don't need the comparison. */
4024
4025 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4026 return true;
4027
4398 /* Convert a == b ? b : a to "a". */
4399 if (true_code == EQ && ! side_effects_p (cond)
4400 && rtx_equal_p (XEXP (cond, 0), false)
4401 && rtx_equal_p (XEXP (cond, 1), true))
4402 return false;
4403 else if (true_code == NE && ! side_effects_p (cond)
4404 && rtx_equal_p (XEXP (cond, 0), true)
4405 && rtx_equal_p (XEXP (cond, 1), false))
4406 return true;
4407
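/* Editor's illustrative sketch (not part of combine.c): both arms of the
   two new conversions above denote A whichever way the comparison goes.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int a, b;
  for (a = 0; a < 3; a++)
    for (b = 0; b < 3; b++)
      {
	assert ((a == b ? b : a) == a);
	assert ((a != b ? a : b) == a);
      }
  return 0;
}
#endif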
4028 /* Look for cases where we have (abs x) or (neg (abs X)). */
4029
4030 if (GET_MODE_CLASS (mode) == MODE_INT
4031 && GET_CODE (false) == NEG
4032 && rtx_equal_p (true, XEXP (false, 0))
4033 && comparison_p
4034 && rtx_equal_p (true, XEXP (cond, 0))
4035 && ! side_effects_p (true))
4036 switch (true_code)
4037 {
4038 case GT:
4039 case GE:
4040 return gen_unary (ABS, mode, mode, true);
4041 case LT:
4042 case LE:
4043 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4424 default:
4425 break;
4044 }
4045
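/* Editor's illustrative sketch (not part of combine.c): comparing X
   against zero and selecting between X and -X is ABS (or its negation),
   as the GT/GE and LT/LE cases above encode.  Assumes X != INT_MIN.  */
#if 0 /* standalone check */
#include <assert.h>
#include <stdlib.h>
int
main (void)
{
  int x;
  for (x = -4; x <= 4; x++)
    {
      assert ((x >= 0 ? x : -x) == abs (x));
      assert ((x <= 0 ? x : -x) == -abs (x));
    }
  return 0;
}
#endif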
4046 /* Look for MIN or MAX. */
4047
4048 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4049 && comparison_p
4050 && rtx_equal_p (XEXP (cond, 0), true)
4051 && rtx_equal_p (XEXP (cond, 1), false)
4052 && ! side_effects_p (cond))
4053 switch (true_code)
4054 {
4055 case GE:
4056 case GT:
4057 return gen_binary (SMAX, mode, true, false);
4058 case LE:
4059 case LT:
4060 return gen_binary (SMIN, mode, true, false);
4061 case GEU:
4062 case GTU:
4063 return gen_binary (UMAX, mode, true, false);
4064 case LEU:
4065 case LTU:
4066 return gen_binary (UMIN, mode, true, false);
4449 default:
4450 break;
4067 }
4068
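/* Editor's illustrative sketch (not part of combine.c): when the selected
   arms are the operands of the comparison itself, the IF_THEN_ELSE above
   is a MIN or MAX, signed or unsigned according to the condition code.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int a = -2, b = 7;
  unsigned ua = 2, ub = 7;
  assert ((a >= b ? a : b) == 7);	/* smax */
  assert ((a <= b ? a : b) == -2);	/* smin */
  assert ((ua >= ub ? ua : ub) == 7);	/* umax */
  assert ((ua <= ub ? ua : ub) == 2);	/* umin */
  return 0;
}
#endif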
4069#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4070
4071 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4072 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4073 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4074 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4075 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4076 neither of the above, but it isn't worth checking for. */
4458 neither 1 nor -1, but it isn't worth checking for. */
4077
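/* Editor's illustrative sketch (not part of combine.c): with
   STORE_FLAG_VALUE == 1 the flag is 0 or 1, so selecting between
   (Z OP C1) and Z is the same as applying OP with C1 scaled by the flag
   -- shown here with OP = PLUS.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int z = 10, c1 = 4, a, b;
  for (a = 0; a < 2; a++)
    for (b = 0; b < 2; b++)
      /* (if_then_else (ne a b) (plus z c1) z)
	 == (plus z (mult flag c1)).  */
      assert ((a != b ? z + c1 : z) == z + (a != b) * c1);
  return 0;
}
#endif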
4078 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4460 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4461 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4079 {
4080 rtx t = make_compound_operation (true, SET);
4081 rtx f = make_compound_operation (false, SET);
4082 rtx cond_op0 = XEXP (cond, 0);
4083 rtx cond_op1 = XEXP (cond, 1);
4084 enum rtx_code op, extend_op = NIL;
4085 enum machine_mode m = mode;
4086 rtx z = 0, c1;
4087
4088 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4089 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4090 || GET_CODE (t) == ASHIFT
4091 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4092 && rtx_equal_p (XEXP (t, 0), f))
4093 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4094
4095 /* If an identity-zero op is commutative, check whether there
4096 would be a match if we swapped the operands. */
4097 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4098 || GET_CODE (t) == XOR)
4099 && rtx_equal_p (XEXP (t, 1), f))
4100 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4101 else if (GET_CODE (t) == SIGN_EXTEND
4102 && (GET_CODE (XEXP (t, 0)) == PLUS
4103 || GET_CODE (XEXP (t, 0)) == MINUS
4104 || GET_CODE (XEXP (t, 0)) == IOR
4105 || GET_CODE (XEXP (t, 0)) == XOR
4106 || GET_CODE (XEXP (t, 0)) == ASHIFT
4107 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4108 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4109 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4110 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4111 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4112 && (num_sign_bit_copies (f, GET_MODE (f))
4113 > (GET_MODE_BITSIZE (mode)
4114 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4115 {
4116 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4117 extend_op = SIGN_EXTEND;
4118 m = GET_MODE (XEXP (t, 0));
4119 }
4120 else if (GET_CODE (t) == SIGN_EXTEND
4121 && (GET_CODE (XEXP (t, 0)) == PLUS
4122 || GET_CODE (XEXP (t, 0)) == IOR
4123 || GET_CODE (XEXP (t, 0)) == XOR)
4124 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4125 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4126 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4127 && (num_sign_bit_copies (f, GET_MODE (f))
4128 > (GET_MODE_BITSIZE (mode)
4129 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4130 {
4131 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4132 extend_op = SIGN_EXTEND;
4133 m = GET_MODE (XEXP (t, 0));
4134 }
4135 else if (GET_CODE (t) == ZERO_EXTEND
4136 && (GET_CODE (XEXP (t, 0)) == PLUS
4137 || GET_CODE (XEXP (t, 0)) == MINUS
4138 || GET_CODE (XEXP (t, 0)) == IOR
4139 || GET_CODE (XEXP (t, 0)) == XOR
4140 || GET_CODE (XEXP (t, 0)) == ASHIFT
4141 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4142 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4143 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4144 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4145 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4146 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4147 && ((nonzero_bits (f, GET_MODE (f))
4148 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4149 == 0))
4150 {
4151 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4152 extend_op = ZERO_EXTEND;
4153 m = GET_MODE (XEXP (t, 0));
4154 }
4155 else if (GET_CODE (t) == ZERO_EXTEND
4156 && (GET_CODE (XEXP (t, 0)) == PLUS
4157 || GET_CODE (XEXP (t, 0)) == IOR
4158 || GET_CODE (XEXP (t, 0)) == XOR)
4159 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4160 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4161 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4162 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4163 && ((nonzero_bits (f, GET_MODE (f))
4164 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4165 == 0))
4166 {
4167 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4168 extend_op = ZERO_EXTEND;
4169 m = GET_MODE (XEXP (t, 0));
4170 }
4171
4172 if (z)
4173 {
4174 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4175 pc_rtx, pc_rtx, 0, 0);
4176 temp = gen_binary (MULT, m, temp,
4177 gen_binary (MULT, m, c1, const_true_rtx));
4178 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4179 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4180
4181 if (extend_op != NIL)
4182 temp = gen_unary (extend_op, mode, m, temp);
4183
4184 return temp;
4185 }
4186 }
4187#endif
4188
4189 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4190 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4191 negation of a single bit, we can convert this operation to a shift. We
4192 can actually do this more generally, but it doesn't seem worth it. */
4193
4194 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4195 && false == const0_rtx && GET_CODE (true) == CONST_INT
4196 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4197 && (i = exact_log2 (INTVAL (true))) >= 0)
4198 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4199 == GET_MODE_BITSIZE (mode))
4200 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4201 return
4202 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4203 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4204
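/* Editor's illustrative sketch (not part of combine.c): when A is known
   to be 0 or 1 and C1 is a single bit, selecting C1 or 0 on (ne A 0) is
   a left shift of A, as the transformation above computes.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int a;
  for (a = 0; a <= 1; a++)
    assert ((a != 0 ? 8 : 0) == a << 3);
  return 0;
}
#endif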
4205 return x;
4206}
4207
4208/* Simplify X, a SET expression. Return the new expression. */
4209
4210static rtx
4211simplify_set (x)
4212 rtx x;
4213{
4214 rtx src = SET_SRC (x);
4215 rtx dest = SET_DEST (x);
4216 enum machine_mode mode
4217 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4218 rtx other_insn;
4219 rtx *cc_use;
4220
4221 /* (set (pc) (return)) gets written as (return). */
4222 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4223 return src;
4224
4225 /* Now that we know for sure which bits of SRC we are using, see if we can
4226 simplify the expression for the object knowing that we only need the
4227 low-order bits. */
4228
4229 if (GET_MODE_CLASS (mode) == MODE_INT)
4230 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4231
4232 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4233 the comparison result and try to simplify it unless we already have used
4234 undobuf.other_insn. */
4235 if ((GET_CODE (src) == COMPARE
4236#ifdef HAVE_cc0
4237 || dest == cc0_rtx
4238#endif
4239 )
4240 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4241 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4242 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4243 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4244 {
4245 enum rtx_code old_code = GET_CODE (*cc_use);
4246 enum rtx_code new_code;
4247 rtx op0, op1;
4248 int other_changed = 0;
4249 enum machine_mode compare_mode = GET_MODE (dest);
4250
4251 if (GET_CODE (src) == COMPARE)
4252 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4253 else
4254 op0 = src, op1 = const0_rtx;
4255
4256 /* Simplify our comparison, if possible. */
4257 new_code = simplify_comparison (old_code, &op0, &op1);
4258
4259#ifdef EXTRA_CC_MODES
4260 /* If this machine has CC modes other than CCmode, check to see if we
4261 need to use a different CC mode here. */
4262 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4263#endif /* EXTRA_CC_MODES */
4264
4265#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4266 /* If the mode changed, we have to change SET_DEST, the mode in the
4267 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4268 a hard register, just build new versions with the proper mode. If it
4269 is a pseudo, we lose unless it is the only time we set the pseudo, in
4270 which case we can safely change its mode. */
4271 if (compare_mode != GET_MODE (dest))
4272 {
4273 int regno = REGNO (dest);
4274 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4656 rtx new_dest = gen_rtx_REG (compare_mode, regno);
4275
4276 if (regno < FIRST_PSEUDO_REGISTER
4277 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4659 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
4278 {
4279 if (regno >= FIRST_PSEUDO_REGISTER)
4280 SUBST (regno_reg_rtx[regno], new_dest);
4281
4282 SUBST (SET_DEST (x), new_dest);
4283 SUBST (XEXP (*cc_use, 0), new_dest);
4284 other_changed = 1;
4285
4286 dest = new_dest;
4287 }
4288 }
4289#endif
4290
4291 /* If the code changed, we have to build a new comparison in
4292 undobuf.other_insn. */
4293 if (new_code != old_code)
4294 {
4295 unsigned HOST_WIDE_INT mask;
4296
4297 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4298 dest, const0_rtx));
4299
4300 /* If the only change we made was to change an EQ into an NE or
4301 vice versa, OP0 has only one bit that might be nonzero, and OP1
4302 is zero, check if changing the user of the condition code will
4303 produce a valid insn. If it won't, we can keep the original code
4304 in that insn by surrounding our operation with an XOR. */
4305
4306 if (((old_code == NE && new_code == EQ)
4307 || (old_code == EQ && new_code == NE))
4308 && ! other_changed && op1 == const0_rtx
4309 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4310 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4311 {
4312 rtx pat = PATTERN (other_insn), note = 0;
4313 int scratches;
4314
4315 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
4316 && ! check_asm_operands (pat)))
4317 {
4318 PUT_CODE (*cc_use, old_code);
4319 other_insn = 0;
4320
4321 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4322 }
4323 }
4324
4325 other_changed = 1;
4326 }
4327
4328 if (other_changed)
4329 undobuf.other_insn = other_insn;
4330
4331#ifdef HAVE_cc0
4332 /* If we are now comparing against zero, change our source if
4333 needed. If we do not use cc0, we always have a COMPARE. */
4334 if (op1 == const0_rtx && dest == cc0_rtx)
4335 {
4336 SUBST (SET_SRC (x), op0);
4337 src = op0;
4338 }
4339 else
4340#endif
4341
4342 /* Otherwise, if we didn't previously have a COMPARE in the
4343 correct mode, we need one. */
4344 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4345 {
4346 SUBST (SET_SRC (x),
4347 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4348 src = SET_SRC (x);
4349 }
4350 else
4351 {
4352 /* Otherwise, update the COMPARE if needed. */
4353 SUBST (XEXP (src, 0), op0);
4354 SUBST (XEXP (src, 1), op1);
4355 }
4356 }
4357 else
4358 {
4359 /* Get SET_SRC in a form where we have placed back any
4360 compound expressions. Then do the checks below. */
4361 src = make_compound_operation (src, SET);
4362 SUBST (SET_SRC (x), src);
4363 }
4364
4365 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4366 and X being a REG or (subreg (reg)), we may be able to convert this to
4367 (set (subreg:m2 x) (op)).
4368
4369 We can always do this if M1 is narrower than M2 because that means that
4370 we only care about the low bits of the result.
4371
4372 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4373 perform a narrower operation that requested since the high-order bits will
4755 perform a narrower operation than requested since the high-order bits will
4374 be undefined. On machines where it is defined, this transformation is safe
4375 as long as M1 and M2 have the same number of words. */
4376
4377 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4378 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4379 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4380 / UNITS_PER_WORD)
4381 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4382 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4383#ifndef WORD_REGISTER_OPERATIONS
4384 && (GET_MODE_SIZE (GET_MODE (src))
4385 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4386#endif
4387#ifdef CLASS_CANNOT_CHANGE_SIZE
4388 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4389 && (TEST_HARD_REG_BIT
4390 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4391 REGNO (dest)))
4392 && (GET_MODE_SIZE (GET_MODE (src))
4393 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4394#endif
4395 && (GET_CODE (dest) == REG
4396 || (GET_CODE (dest) == SUBREG
4397 && GET_CODE (SUBREG_REG (dest)) == REG)))
4398 {
4399 SUBST (SET_DEST (x),
4400 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4401 dest));
4402 SUBST (SET_SRC (x), SUBREG_REG (src));
4403
4404 src = SET_SRC (x), dest = SET_DEST (x);
4405 }
4406
4407#ifdef LOAD_EXTEND_OP
4408 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4409 would require a paradoxical subreg. Replace the subreg with a
4410 zero_extend to avoid the reload that would otherwise be required. */
4411
4412 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4413 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4414 && SUBREG_WORD (src) == 0
4415 && (GET_MODE_SIZE (GET_MODE (src))
4416 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4417 && GET_CODE (SUBREG_REG (src)) == MEM)
4418 {
4419 SUBST (SET_SRC (x),
4420 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4421 GET_MODE (src), XEXP (src, 0)));
4422
4423 src = SET_SRC (x);
4424 }
4425#endif
4426
4427 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4428 are comparing an item known to be 0 or -1 against 0, use a logical
4429 operation instead. Check for one of the arms being an IOR of the other
4430 arm with some value. We compute three terms to be IOR'ed together. In
4431 practice, at most two will be nonzero. Then we do the IOR's. */
4432
4433 if (GET_CODE (dest) != PC
4434 && GET_CODE (src) == IF_THEN_ELSE
4435 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4436 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4437 && XEXP (XEXP (src, 0), 1) == const0_rtx
4438 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4439#ifdef HAVE_conditional_move
4440 && ! can_conditionally_move_p (GET_MODE (src))
4441#endif
4442 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4443 GET_MODE (XEXP (XEXP (src, 0), 0)))
4444 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4445 && ! side_effects_p (src))
4446 {
4447 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4448 ? XEXP (src, 1) : XEXP (src, 2));
4449 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4450 ? XEXP (src, 2) : XEXP (src, 1));
4451 rtx term1 = const0_rtx, term2, term3;
4452
4453 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4454 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4455 else if (GET_CODE (true) == IOR
4456 && rtx_equal_p (XEXP (true, 1), false))
4457 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4458 else if (GET_CODE (false) == IOR
4459 && rtx_equal_p (XEXP (false, 0), true))
4460 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4461 else if (GET_CODE (false) == IOR
4462 && rtx_equal_p (XEXP (false, 1), true))
4463 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4464
4465 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4466 term3 = gen_binary (AND, GET_MODE (src),
4467 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4468 XEXP (XEXP (src, 0), 0)),
4469 false);
4470
4471 SUBST (SET_SRC (x),
4472 gen_binary (IOR, GET_MODE (src),
4473 gen_binary (IOR, GET_MODE (src), term1, term2),
4474 term3));
4475
4476 src = SET_SRC (x);
4477 }
4478
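/* Editor's illustrative sketch (not part of combine.c): when A is known
   to be 0 or -1, (if_then_else (ne A 0) T F) can be computed without a
   conditional move as (ior (and A T) (and (not A) F)), which is what the
   IOR'ed terms above build up.  */
#if 0 /* standalone check */
#include <assert.h>
int
main (void)
{
  int t = 5, f = 9, a;
  for (a = -1; a <= 0; a++)
    assert ((a != 0 ? t : f) == ((a & t) | (~a & f)));
  return 0;
}
#endif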
4479 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4480 whole thing fail. */
4481 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4482 return src;
4483 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4484 return dest;
4485 else
4486 /* Convert this into a field assignment operation, if possible. */
4487 return make_field_assignment (x);
4488}
4489
4490/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4491 result. LAST is nonzero if this is the last retry. */
4492
4493static rtx
4494simplify_logical (x, last)
4495 rtx x;
4496 int last;
4497{
4498 enum machine_mode mode = GET_MODE (x);
4499 rtx op0 = XEXP (x, 0);
4500 rtx op1 = XEXP (x, 1);
4501
4502 switch (GET_CODE (x))
4503 {
4504 case AND:
4505 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4506 insn (and may simplify more). */
4507 if (GET_CODE (op0) == XOR
4508 && rtx_equal_p (XEXP (op0, 0), op1)
4509 && ! side_effects_p (op1))
4510 x = gen_binary (AND, mode,
4511 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4512
4513 if (GET_CODE (op0) == XOR
4514 && rtx_equal_p (XEXP (op0, 1), op1)
4515 && ! side_effects_p (op1))
4516 x = gen_binary (AND, mode,
4517 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4518
4519 /* Similarly for (~ (A ^ B)) & A. */
4520 if (GET_CODE (op0) == NOT
4521 && GET_CODE (XEXP (op0, 0)) == XOR
4522 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4523 && ! side_effects_p (op1))
4524 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4525
4526 if (GET_CODE (op0) == NOT
4527 && GET_CODE (XEXP (op0, 0)) == XOR
4528 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4529 && ! side_effects_p (op1))
4530 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4531
4532 if (GET_CODE (op1) == CONST_INT)
4533 {
4534 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4535
4536 /* If we have (ior (and (X C1) C2)) and the next restart would be
4537 the last, simplify this by making C1 as small as possible
4793
4794 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4795 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4796 && SUBREG_WORD (src) == 0
4797 && (GET_MODE_SIZE (GET_MODE (src))
4798 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4799 && GET_CODE (SUBREG_REG (src)) == MEM)
4800 {
4801 SUBST (SET_SRC (x),
4802 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4803 GET_MODE (src), XEXP (src, 0)));
4804
4805 src = SET_SRC (x);
4806 }
4807#endif
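 /* Illustrative sketch, not part of the original source: on a target
    whose LOAD_EXTEND_OP (QImode) is ZERO_EXTEND, the rewrite above turns

        (set (reg:SI 100) (subreg:SI (mem:QI addr) 0))

    into

        (set (reg:SI 100) (zero_extend:SI (mem:QI addr)))

    so that reload never has to materialize the paradoxical SUBREG.  */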
4808
4809 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4810 are comparing an item known to be 0 or -1 against 0, use a logical
4811 operation instead. Check for one of the arms being an IOR of the other
4812 arm with some value. We compute three terms to be IOR'ed together. In
4813 practice, at most two will be nonzero. Then we do the IOR's. */
4814
4815 if (GET_CODE (dest) != PC
4816 && GET_CODE (src) == IF_THEN_ELSE
4817 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4818 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4819 && XEXP (XEXP (src, 0), 1) == const0_rtx
4820 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4821#ifdef HAVE_conditional_move
4822 && ! can_conditionally_move_p (GET_MODE (src))
4823#endif
4824 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4825 GET_MODE (XEXP (XEXP (src, 0), 0)))
4826 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4827 && ! side_effects_p (src))
4828 {
4829 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4830 ? XEXP (src, 1) : XEXP (src, 2));
4831 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4832 ? XEXP (src, 2) : XEXP (src, 1));
4833 rtx term1 = const0_rtx, term2, term3;
4834
4835 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4836 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4837 else if (GET_CODE (true) == IOR
4838 && rtx_equal_p (XEXP (true, 1), false))
4839 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4840 else if (GET_CODE (false) == IOR
4841 && rtx_equal_p (XEXP (false, 0), true))
4842 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4843 else if (GET_CODE (false) == IOR
4844 && rtx_equal_p (XEXP (false, 1), true))
4845 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4846
4847 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4848 term3 = gen_binary (AND, GET_MODE (src),
4849 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
4850 XEXP (XEXP (src, 0), 0)),
4851 false);
4852
4853 SUBST (SET_SRC (x),
4854 gen_binary (IOR, GET_MODE (src),
4855 gen_binary (IOR, GET_MODE (src), term1, term2),
4856 term3));
4857
4858 src = SET_SRC (x);
4859 }
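 /* A worked example of the rewrite above (illustrative only): with A
    known to be 0 or -1,

        (if_then_else (ne A (const_int 0)) (ior F V) F)

    matches the first IOR arm, so TERM1 = F, TERM2 = (and A V) and
    TERM3 = (and (not A) (const_int 0)), i.e. 0; the result

        (ior (ior F (and A V)) (const_int 0))

    is F when A == 0 and (ior F V) when A == -1, as required.  */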
4860
4861 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4862 whole thing fail. */
4863 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4864 return src;
4865 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4866 return dest;
4867 else
4868 /* Convert this into a field assignment operation, if possible. */
4869 return make_field_assignment (x);
4870}
4871
4872/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4873 result. LAST is nonzero if this is the last retry. */
4874
4875static rtx
4876simplify_logical (x, last)
4877 rtx x;
4878 int last;
4879{
4880 enum machine_mode mode = GET_MODE (x);
4881 rtx op0 = XEXP (x, 0);
4882 rtx op1 = XEXP (x, 1);
4883
4884 switch (GET_CODE (x))
4885 {
4886 case AND:
4887 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4888 insn (and may simplify more). */
4889 if (GET_CODE (op0) == XOR
4890 && rtx_equal_p (XEXP (op0, 0), op1)
4891 && ! side_effects_p (op1))
4892 x = gen_binary (AND, mode,
4893 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
4894
4895 if (GET_CODE (op0) == XOR
4896 && rtx_equal_p (XEXP (op0, 1), op1)
4897 && ! side_effects_p (op1))
4898 x = gen_binary (AND, mode,
4899 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
4900
4901 /* Similarly for (~ (A ^ B)) & A. */
4902 if (GET_CODE (op0) == NOT
4903 && GET_CODE (XEXP (op0, 0)) == XOR
4904 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4905 && ! side_effects_p (op1))
4906 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4907
4908 if (GET_CODE (op0) == NOT
4909 && GET_CODE (XEXP (op0, 0)) == XOR
4910 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4911 && ! side_effects_p (op1))
4912 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4913
4914 if (GET_CODE (op1) == CONST_INT)
4915 {
4916 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
4917
4918 /* If we have (ior (and X C1) C2) and the next restart would be
4919 the last, simplify this by making C1 as small as possible
4920 and then exit. */
4921 if (last
4922 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4923 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4924 && GET_CODE (op1) == CONST_INT)
4925 return gen_binary (IOR, mode,
4926 gen_binary (AND, mode, XEXP (op0, 0),
4927 GEN_INT (INTVAL (XEXP (op0, 1))
4928 & ~ INTVAL (op1))), op1);
4929
4930 if (GET_CODE (x) != AND)
4931 return x;
4932
4933 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4934 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4935 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4936 }
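 /* Illustrative instance of the C1-shrinking rule above: if the
    simplification produced

        (ior (and X (const_int 0xff)) (const_int 0x0f))

    the last retry rewrites it as

        (ior (and X (const_int 0xf0)) (const_int 0x0f))

    since any bit of C1 that is already set in C2 cannot change the
    result of the IOR.  */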
4937
4938 /* Convert (A | B) & A to A. */
4939 if (GET_CODE (op0) == IOR
4940 && (rtx_equal_p (XEXP (op0, 0), op1)
4941 || rtx_equal_p (XEXP (op0, 1), op1))
4942 && ! side_effects_p (XEXP (op0, 0))
4943 && ! side_effects_p (XEXP (op0, 1)))
4944 return op1;
4945
4946 /* In the following group of tests (and those in case IOR below),
4947 we start with some combination of logical operations and apply
4948 the distributive law followed by the inverse distributive law.
4949 Most of the time, this results in no change. However, if some of
4950 the operands are the same or inverses of each other, simplifications
4951 will result.
4952
4953 For example, (and (ior A B) (not B)) can occur as the result of
4954 expanding a bit field assignment. When we apply the distributive
4955 law to this, we get (ior (and A (not B)) (and B (not B))),
4956 which then simplifies to (and A (not B)).
4957
4958 If we have (and (ior A B) C), apply the distributive law and then
4959 the inverse distributive law to see if things simplify. */
4960
4961 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
4962 {
4963 x = apply_distributive_law
4964 (gen_binary (GET_CODE (op0), mode,
4965 gen_binary (AND, mode, XEXP (op0, 0), op1),
4966 gen_binary (AND, mode, XEXP (op0, 1), op1)));
4967 if (GET_CODE (x) != AND)
4968 return x;
4969 }
4970
4971 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4972 return apply_distributive_law
4973 (gen_binary (GET_CODE (op1), mode,
4974 gen_binary (AND, mode, XEXP (op1, 0), op0),
4975 gen_binary (AND, mode, XEXP (op1, 1), op0)));
4976
4977 /* Similarly, taking advantage of the fact that
4978 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
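 /* (Bitwise sketch of why the identity holds: where a bit of A is 1,
    both sides give 0 for that bit; where it is 0, both sides reduce to
    the corresponding bit of (xor B C).)  */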
4979
4980 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4981 return apply_distributive_law
4982 (gen_binary (XOR, mode,
4983 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4984 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
4985
4986 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4987 return apply_distributive_law
4988 (gen_binary (XOR, mode,
4989 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4990 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
4991 break;
4992
4993 case IOR:
4994 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4995 if (GET_CODE (op1) == CONST_INT
4996 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4997 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4998 return op1;
4999
5000 /* Convert (A & B) | A to A. */
5001 if (GET_CODE (op0) == AND
5002 && (rtx_equal_p (XEXP (op0, 0), op1)
5003 || rtx_equal_p (XEXP (op0, 1), op1))
5004 && ! side_effects_p (XEXP (op0, 0))
5005 && ! side_effects_p (XEXP (op0, 1)))
5006 return op1;
5007
5008 /* If we have (ior (and A B) C), apply the distributive law and then
5009 the inverse distributive law to see if things simplify. */
5010
5011 if (GET_CODE (op0) == AND)
5012 {
5013 x = apply_distributive_law
5014 (gen_binary (AND, mode,
5015 gen_binary (IOR, mode, XEXP (op0, 0), op1),
5016 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
5017
5018 if (GET_CODE (x) != IOR)
5019 return x;
5020 }
5021
5022 if (GET_CODE (op1) == AND)
5023 {
5024 x = apply_distributive_law
5025 (gen_binary (AND, mode,
5026 gen_binary (IOR, mode, XEXP (op1, 0), op0),
5027 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
5028
5029 if (GET_CODE (x) != IOR)
5030 return x;
5031 }
5032
5033 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
5034 mode size to (rotate A CX). */
5035
5036 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
5037 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
5038 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
5039 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5040 && GET_CODE (XEXP (op1, 1)) == CONST_INT
5041 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
5042 == GET_MODE_BITSIZE (mode)))
4661 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4662 (GET_CODE (op0) == ASHIFT
4663 ? XEXP (op0, 1) : XEXP (op1, 1)));
5043 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
5044 (GET_CODE (op0) == ASHIFT
5045 ? XEXP (op0, 1) : XEXP (op1, 1)));
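 /* For instance, in SImode (illustrative only),

        (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))

    becomes (rotate A (const_int 24)), the RTL analogue of the C rotate
    idiom (a << 24) | (a >> 8) on a 32-bit unsigned value.  */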
5046
5047 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5048 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5049 does not affect any of the bits in OP1, it can really be done
5050 as a PLUS and we can associate. We do this by seeing if OP1
5051 can be safely shifted left C bits. */
5052 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5053 && GET_CODE (XEXP (op0, 0)) == PLUS
5054 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5055 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5056 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5057 {
5058 int count = INTVAL (XEXP (op0, 1));
5059 HOST_WIDE_INT mask = INTVAL (op1) << count;
5060
5061 if (mask >> count == INTVAL (op1)
5062 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5063 {
5064 SUBST (XEXP (XEXP (op0, 0), 1),
5065 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5066 return op0;
5067 }
5068 }
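 /* Numeric sketch of the test above (illustrative): with
    OP0 = (ashiftrt (plus X K) (const_int 8)) and OP1 = (const_int 0x0f),
    MASK is 0xf00; when those bits of the PLUS are known to be zero, the
    IOR folds into the inner constant, yielding
    (ashiftrt (plus X (const_int K | 0xf00)) (const_int 8)).  */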
5069 break;
5070
5071 case XOR:
5072 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5073 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5074 (NOT y). */
5075 {
5076 int num_negated = 0;
5077
5078 if (GET_CODE (op0) == NOT)
5079 num_negated++, op0 = XEXP (op0, 0);
5080 if (GET_CODE (op1) == NOT)
5081 num_negated++, op1 = XEXP (op1, 0);
5082
5083 if (num_negated == 2)
5084 {
5085 SUBST (XEXP (x, 0), op0);
5086 SUBST (XEXP (x, 1), op1);
5087 }
5088 else if (num_negated == 1)
5089 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
5090 }
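 /* E.g. (xor (not x) (not y)) simplifies to (xor x y); complementing
    both operands of an XOR cancels, since ~x ^ ~y == x ^ y for every
    bit pattern (illustrative note, not from the original source).  */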
5091
5092 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5093 correspond to a machine insn or result in further simplifications
5094 if B is a constant. */
5095
5096 if (GET_CODE (op0) == AND
5097 && rtx_equal_p (XEXP (op0, 1), op1)
5098 && ! side_effects_p (op1))
5099 return gen_binary (AND, mode,
5100 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
5101 op1);
5102
5103 else if (GET_CODE (op0) == AND
5104 && rtx_equal_p (XEXP (op0, 0), op1)
5105 && ! side_effects_p (op1))
5106 return gen_binary (AND, mode,
5107 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
5108 op1);
5109
4728#if STORE_FLAG_VALUE == 1
5110 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4730 comparison. */
4731 if (op1 == const1_rtx
5111 comparison if STORE_FLAG_VALUE is 1. */
5112 if (STORE_FLAG_VALUE == 1
5113 && op1 == const1_rtx
5114 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5115 && reversible_comparison_p (op0))
5116 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5117 mode, XEXP (op0, 0), XEXP (op0, 1));
5118
5119 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5120 is (lt foo (const_int 0)), so we can perform the above
4739 simplification. */
5121 simplification if STORE_FLAG_VALUE is 1. */
5122
4741 if (op1 == const1_rtx
5123 if (STORE_FLAG_VALUE == 1
5124 && op1 == const1_rtx
5125 && GET_CODE (op0) == LSHIFTRT
5126 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5127 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5128 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
4746#endif
5129
5130 /* (xor (comparison foo bar) (const_int sign-bit))
5131 when STORE_FLAG_VALUE is the sign bit. */
5132 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4751 && (STORE_FLAG_VALUE
5133 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5134 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
5135 && op1 == const_true_rtx
5136 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5137 && reversible_comparison_p (op0))
5138 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5139 mode, XEXP (op0, 0), XEXP (op0, 1));
5140 break;
5141
5142 default:
5143 abort ();
5144 }
5145
5146 return x;
5147}
5148
5149/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5150 operations" because they can be replaced with two more basic operations.
5151 ZERO_EXTEND is also considered "compound" because it can be replaced with
5152 an AND operation, which is simpler, though only one operation.
5153
5154 The function expand_compound_operation is called with an rtx expression
5155 and will convert it to the appropriate shifts and AND operations,
5156 simplifying at each stage.
5157
5158 The function make_compound_operation is called to convert an expression
5159 consisting of shifts and ANDs into the equivalent compound expression.
5160 It is the inverse of this function, loosely speaking. */
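/* Illustrative examples of the two directions (not from the original
   source): expanding

       (sign_extract:SI X (const_int 8) (const_int 3))

   yields (ashiftrt:SI (ashift:SI X (const_int 21)) (const_int 24)),
   while a ZERO_EXTEND from QImode ends up as an AND with 255 once the
   corresponding pair of logical shifts has been simplified.  */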
5161
5162static rtx
5163expand_compound_operation (x)
5164 rtx x;
5165{
5166 int pos = 0, len;
5167 int unsignedp = 0;
5168 int modewidth;
5169 rtx tem;
5170
5171 switch (GET_CODE (x))
5172 {
5173 case ZERO_EXTEND:
5174 unsignedp = 1;
5175 case SIGN_EXTEND:
5176 /* We can't necessarily use a const_int for a multiword mode;
5177 it depends on implicitly extending the value.
5178 Since we don't know the right way to extend it,
5179 we can't tell whether the implicit way is right.
5180
5181 Even for a mode that is no wider than a const_int,
5182 we can't win, because we need to sign extend one of its bits through
5183 the rest of it, and we don't know which bit. */
5184 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
5185 return x;
5186
5187 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5188 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5189 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5190 reloaded. If not for that, MEM's would very rarely be safe.
5191
5192 Reject MODEs bigger than a word, because we might not be able
5193 to reference a two-register group starting with an arbitrary register
5194 (and currently gen_lowpart might crash for a SUBREG). */
5195
5196 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
5197 return x;
5198
5199 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5200 /* If the inner object has VOIDmode (the only way this can happen
5201 is if it is an ASM_OPERANDS), we can't do anything since we don't
5202 know how much masking to do. */
5203 if (len == 0)
5204 return x;
5205
5206 break;
5207
5208 case ZERO_EXTRACT:
5209 unsignedp = 1;
5210 case SIGN_EXTRACT:
5211 /* If the operand is a CLOBBER, just return it. */
5212 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5213 return XEXP (x, 0);
5214
5215 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5216 || GET_CODE (XEXP (x, 2)) != CONST_INT
5217 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5218 return x;
5219
5220 len = INTVAL (XEXP (x, 1));
5221 pos = INTVAL (XEXP (x, 2));
5222
5223 /* If this goes outside the object being extracted, replace the object
5224 with a (use (mem ...)) construct that only combine understands
5225 and is used only for this purpose. */
5226 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4842 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
5227 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
5228
5229 if (BITS_BIG_ENDIAN)
5230 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5231
5232 break;
5233
5234 default:
5235 return x;
5236 }
5237
5238 /* We can optimize some special cases of ZERO_EXTEND. */
5239 if (GET_CODE (x) == ZERO_EXTEND)
5240 {
5241 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5242 know that the last value didn't have any inappropriate bits
5243 set. */
5244 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5245 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5246 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5247 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5248 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5249 return XEXP (XEXP (x, 0), 0);
5250
5251 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5252 if (GET_CODE (XEXP (x, 0)) == SUBREG
5253 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5254 && subreg_lowpart_p (XEXP (x, 0))
5255 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5256 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5257 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5258 return SUBREG_REG (XEXP (x, 0));
5259
5260 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5261 is a comparison and STORE_FLAG_VALUE permits. This is like
5262 the first case, but it works even when GET_MODE (x) is larger
5263 than HOST_WIDE_INT. */
5264 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5265 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5266 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5267 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5268 <= HOST_BITS_PER_WIDE_INT)
5269 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5270 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5271 return XEXP (XEXP (x, 0), 0);
5272
5273 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5274 if (GET_CODE (XEXP (x, 0)) == SUBREG
5275 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5276 && subreg_lowpart_p (XEXP (x, 0))
5277 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5278 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5279 <= HOST_BITS_PER_WIDE_INT)
5280 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5281 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5282 return SUBREG_REG (XEXP (x, 0));
5283
5284 /* If sign extension is cheaper than zero extension, then use it
5285 if we know that no extraneous bits are set, and that the high
5286 bit is not set. */
5287 if (flag_expensive_optimizations
5288 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5289 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5290 & ~ (((unsigned HOST_WIDE_INT)
5291 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5292 >> 1))
5293 == 0))
5294 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5295 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5296 <= HOST_BITS_PER_WIDE_INT)
5297 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5298 & ~ (((unsigned HOST_WIDE_INT)
5299 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5300 >> 1))
5301 == 0))))
5302 {
5303 rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
5304
5305 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5306 return expand_compound_operation (temp);
5307 }
5308 }
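 /* Illustrative instance of the SUBREG cases above: when the high 32
    bits of (reg:DI r) are known to be zero,

        (zero_extend:DI (subreg:SI (reg:DI r) 0))

    is simply (reg:DI r), because the extension would only clear bits
    that are already clear.  */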
5309
5310 /* If we reach here, we want to return a pair of shifts. The inner
5311 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5312 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5313 logical depending on the value of UNSIGNEDP.
5314
5315 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5316 converted into an AND of a shift.
5317
5318 We must check for the case where the left shift would have a negative
5319 count. This can happen in a case like (x >> 31) & 255 on machines
5320 that can't shift by a constant. On those machines, we would first
5321 combine the shift with the AND to produce a variable-position
5322 extraction. Then the constant of 31 would be substituted in to produce
5323 such a position. */
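 /* Concretely (illustrative numbers): for MODEWIDTH == 32, POS == 3 and
    LEN == 8 this builds

        (ashiftrt:SI (ashift:SI X (const_int 21)) (const_int 24))

    for a signed extraction, with LSHIFTRT as the outer code when
    UNSIGNEDP; the fallback arm instead masks a single right shift,
    (and:SI (lshiftrt:SI X (const_int 3)) (const_int 255)).  */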
5324
5325 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5326 if (modewidth >= pos - len)
5327 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
5328 GET_MODE (x),
5329 simplify_shift_const (NULL_RTX, ASHIFT,
5330 GET_MODE (x),
5331 XEXP (x, 0),
5332 modewidth - pos - len),
5333 modewidth - len);
5334
5335 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5336 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5337 simplify_shift_const (NULL_RTX, LSHIFTRT,
5338 GET_MODE (x),
5339 XEXP (x, 0), pos),
5340 ((HOST_WIDE_INT) 1 << len) - 1);
5341 else
5342 /* Any other cases we can't handle. */
5343 return x;
5344
5345
5346 /* If we couldn't do this for some reason, return the original
5347 expression. */
5348 if (GET_CODE (tem) == CLOBBER)
5349 return x;
5350
5351 return tem;
5352}
5353
5354/* X is a SET which contains an assignment of one object into
5355 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5356 or certain SUBREGS). If possible, convert it into a series of
5357 logical operations.
5358
5359 We half-heartedly support variable positions, but do not at all
5360 support variable lengths. */
5361
5362static rtx
5363expand_field_assignment (x)
5364 rtx x;
5365{
5366 rtx inner;
5367 rtx pos; /* Always counts from low bit. */
5368 int len;
5369 rtx mask;
5370 enum machine_mode compute_mode;
5371
5372 /* Loop until we find something we can't simplify. */
5373 while (1)
5374 {
5375 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5376 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5377 {
5378 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5379 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4923 pos = const0_rtx;
5380 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
5381 }
5382 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5383 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5384 {
5385 inner = XEXP (SET_DEST (x), 0);
5386 len = INTVAL (XEXP (SET_DEST (x), 1));
5387 pos = XEXP (SET_DEST (x), 2);
5388
5389 /* If the position is constant and spans the width of INNER,
5390 surround INNER with a USE to indicate this. */
5391 if (GET_CODE (pos) == CONST_INT
5392 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4936 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
5393 inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
5394
5395 if (BITS_BIG_ENDIAN)
5396 {
5397 if (GET_CODE (pos) == CONST_INT)
5398 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5399 - INTVAL (pos));
5400 else if (GET_CODE (pos) == MINUS
5401 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5402 && (INTVAL (XEXP (pos, 1))
5403 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5404 /* If position is ADJUST - X, new position is X. */
5405 pos = XEXP (pos, 0);
5406 else
5407 pos = gen_binary (MINUS, GET_MODE (pos),
5408 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5409 - len),
5410 pos);
5411 }
5412 }
5413
5414 /* A SUBREG between two modes that occupy the same numbers of words
5415 can be done by moving the SUBREG to the source. */
5416 else if (GET_CODE (SET_DEST (x)) == SUBREG
5417 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5418 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5419 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5420 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5421 {
4965 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4966 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4967 SET_SRC (x)));
5422 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
5423 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5424 SET_SRC (x)));
5425 continue;
5426 }
5427 else
5428 break;
5429
5430 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5431 inner = SUBREG_REG (inner);
5432
5433 compute_mode = GET_MODE (inner);
5434
5435 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5436 if (len < HOST_BITS_PER_WIDE_INT)
5437 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
5438 else
5439 break;
5440
5441 /* Now compute the equivalent expression. Make a copy of INNER
5442 for the SET_DEST in case it is a MEM into which we will substitute;
5443 we don't want shared RTL in that case. */
4987 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4988 gen_binary (IOR, compute_mode,
4989 gen_binary (AND, compute_mode,
4990 gen_unary (NOT, compute_mode,
4991 compute_mode,
4992 gen_binary (ASHIFT,
4993 compute_mode,
4994 mask, pos)),
4995 inner),
4996 gen_binary (ASHIFT, compute_mode,
4997 gen_binary (AND, compute_mode,
4998 gen_lowpart_for_combine
4999 (compute_mode,
5000 SET_SRC (x)),
5001 mask),
5002 pos)));
5444 x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
5445 gen_binary (IOR, compute_mode,
5446 gen_binary (AND, compute_mode,
5447 gen_unary (NOT, compute_mode,
5448 compute_mode,
5449 gen_binary (ASHIFT,
5450 compute_mode,
5451 mask, pos)),
5452 inner),
5453 gen_binary (ASHIFT, compute_mode,
5454 gen_binary (AND, compute_mode,
5455 gen_lowpart_for_combine
5456 (compute_mode,
5457 SET_SRC (x)),
5458 mask),
5459 pos)));
5460 }
5461
5462 return x;
5463}
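/* As an illustration of the expansion performed above (sketch only), the
   field store

       (set (zero_extract:SI X (const_int 8) (const_int 4)) Y)

   becomes, with MASK == 255,

       (set X (ior (and (not (ashift 255 4)) X)
                   (ashift (and Y 255) 4)))

   which is the C idiom  x = (x & ~(0xff << 4)) | ((y & 0xff) << 4);  */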
5464
5465/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5466 it is an RTX that represents a variable starting position; otherwise,
5467 POS is the (constant) starting bit position (counted from the LSB).
5468
5469 INNER may be a USE. This will occur when we started with a bitfield
5470 that went outside the boundary of the object in memory, which is
5471 allowed on most machines. To isolate this case, we produce a USE
5472 whose mode is wide enough and surround the MEM with it. The only
5473 code that understands the USE is this routine. If it is not removed,
5474 it will cause the resulting insn not to match.
5475
5476 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5477 signed reference.
5478
5479 IN_DEST is non-zero if this is a reference in the destination of a
5480 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5481 a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
5482 be used.
5483
5484 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5485 ZERO_EXTRACT should be built even for bits starting at bit 0.
5486
5030 MODE is the desired mode of the result (if IN_DEST == 0). */
5487 MODE is the desired mode of the result (if IN_DEST == 0).
5488
5489 The result is an RTX for the extraction or NULL_RTX if the target
5490 can't handle it. */
5491
5492static rtx
5493make_extraction (mode, inner, pos, pos_rtx, len,
5494 unsignedp, in_dest, in_compare)
5495 enum machine_mode mode;
5496 rtx inner;
5497 int pos;
5498 rtx pos_rtx;
5499 int len;
5500 int unsignedp;
5501 int in_dest, in_compare;
5502{
5503 /* This mode describes the size of the storage area
5504 to fetch the overall value from. Within that, we
5505 ignore the POS lowest bits, etc. */
5506 enum machine_mode is_mode = GET_MODE (inner);
5507 enum machine_mode inner_mode;
5048 enum machine_mode wanted_mem_mode = byte_mode;
5508 enum machine_mode wanted_inner_mode = byte_mode;
5509 enum machine_mode wanted_inner_reg_mode = word_mode;
5510 enum machine_mode pos_mode = word_mode;
5511 enum machine_mode extraction_mode = word_mode;
5512 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5513 int spans_byte = 0;
5514 rtx new = 0;
5515 rtx orig_pos_rtx = pos_rtx;
5516 int orig_pos;
5517
5518 /* Get some information about INNER and get the innermost object. */
5519 if (GET_CODE (inner) == USE)
5520 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
5521 /* We don't need to adjust the position because we set up the USE
5522 to pretend that it was a full-word object. */
5523 spans_byte = 1, inner = XEXP (inner, 0);
5524 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5525 {
5526 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5527 consider just the QI as the memory to extract from.
5528 The subreg adds or removes high bits; its mode is
5529 irrelevant to the meaning of this extraction,
5530 since POS and LEN count from the lsb. */
5531 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5532 is_mode = GET_MODE (SUBREG_REG (inner));
5533 inner = SUBREG_REG (inner);
5534 }
5535
5536 inner_mode = GET_MODE (inner);
5537
5538 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
5539 pos = INTVAL (pos_rtx), pos_rtx = 0;
5540
5541 /* See if this can be done without an extraction. We never can if the
5542 width of the field is not the same as that of some integer mode. For
5543 registers, we can only avoid the extraction if the position is at the
5544 low-order bit and this is either not in the destination or we have the
5545 appropriate STRICT_LOW_PART operation available.
5546
5547 For MEM, we can avoid an extract if the field starts on an appropriate
5548 boundary and we can change the mode of the memory reference. However,
5549 we cannot directly access the MEM if we have a USE and the underlying
5550 MEM is not TMODE. This combination means that MEM was being used in a
5551 context where bits outside its mode were being referenced; that is only
5552 valid in bit-field insns. */
5553
5554 if (tmode != BLKmode
5555 && ! (spans_byte && inner_mode != tmode)
5095 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
5556 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5557 && GET_CODE (inner) != MEM
5558 && (! in_dest
5559 || (GET_CODE (inner) == REG
5560 && (movstrict_optab->handlers[(int) tmode].insn_code
5561 != CODE_FOR_nothing))))
5562 || (GET_CODE (inner) == MEM && pos_rtx == 0
5563 && (pos
5564 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5565 : BITS_PER_UNIT)) == 0
5566 /* We can't do this if we are widening INNER_MODE (it
5567 may not be aligned, for one thing). */
5568 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5569 && (inner_mode == tmode
5570 || (! mode_dependent_address_p (XEXP (inner, 0))
5571 && ! MEM_VOLATILE_P (inner))))))
5572 {
5573 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5574 field. If the original and current mode are the same, we need not
5575 adjust the offset. Otherwise, we do so when bytes are big-endian.
5576
5115 If INNER is not a MEM, get a piece consisting of the just the field
5116 of interest (in this case POS must be 0). */
5577 If INNER is not a MEM, get a piece consisting of just the field
5578 of interest (in this case POS % BITS_PER_WORD must be 0). */
5579
5580 if (GET_CODE (inner) == MEM)
5581 {
5582 int offset;
5583 /* POS counts from lsb, but make OFFSET count in memory order. */
5584 if (BYTES_BIG_ENDIAN)
5585 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5586 else
5587 offset = pos / BITS_PER_UNIT;
5588
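 /* Offset sketch (illustrative): with IS_MODE == SImode, LEN == 8 and
    POS == 8, a big-endian target computes OFFSET == (32 - 8 - 8) / 8,
    i.e. 2, while a little-endian one computes 8 / 8 == 1; both name the
    same byte of the underlying word.  */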
5127 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5589 new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
5590 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5591 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5592 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5593 }
5594 else if (GET_CODE (inner) == REG)
5595 {
5596 /* We can't call gen_lowpart_for_combine here since we always want
5597 a SUBREG and it would sometimes return a new hard register. */
5598 if (tmode != inner_mode)
5137 new = gen_rtx (SUBREG, tmode, inner,
5138 (WORDS_BIG_ENDIAN
5139 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5140 ? ((GET_MODE_SIZE (inner_mode)
5141 - GET_MODE_SIZE (tmode))
5142 / UNITS_PER_WORD)
5143 : 0));
5599 new = gen_rtx_SUBREG (tmode, inner,
5600 (WORDS_BIG_ENDIAN
5601 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5602 ? (((GET_MODE_SIZE (inner_mode)
5603 - GET_MODE_SIZE (tmode))
5604 / UNITS_PER_WORD)
5605 - pos / BITS_PER_WORD)
5606 : pos / BITS_PER_WORD));
5607 else
5608 new = inner;
5609 }
5610 else
5611 new = force_to_mode (inner, tmode,
5612 len >= HOST_BITS_PER_WIDE_INT
5613 ? GET_MODE_MASK (tmode)
5614 : ((HOST_WIDE_INT) 1 << len) - 1,
5615 NULL_RTX, 0);
5616
5617 /* If this extraction is going into the destination of a SET,
5618 make a STRICT_LOW_PART unless we made a MEM. */
5619
5620 if (in_dest)
5621 return (GET_CODE (new) == MEM ? new
5622 : (GET_CODE (new) != SUBREG
5160 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5623 ? gen_rtx_CLOBBER (tmode, const0_rtx)
5624 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5625
5626 /* Otherwise, sign- or zero-extend unless we already are in the
5627 proper mode. */
5628
5629 return (mode == tmode ? new
5630 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5631 mode, new));
5632 }
5633
5634 /* Unless this is a COMPARE or we have a funny memory reference,
5635 don't do anything with zero-extending field extracts starting at
5636 the low-order bit since they are simple AND operations. */
5637 if (pos_rtx == 0 && pos == 0 && ! in_dest
5638 && ! in_compare && ! spans_byte && unsignedp)
5639 return 0;
5640
5641 /* Unless we are allowed to span bytes, reject this if we would be
5642 spanning bytes or if the position is not a constant and the length
5643 is not 1. In all other cases, we would only be going outside
5644 our object in cases when an original shift would have been
5645 undefined. */
5646 if (! spans_byte
5647 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5648 || (pos_rtx != 0 && len != 1)))
5649 return 0;
5650
5188 /* Get the mode to use should INNER be a MEM, the mode for the position,
5651 /* Get the mode to use should INNER not be a MEM, the mode for the position,
5189 and the mode for the result. */
5190#ifdef HAVE_insv
5191 if (in_dest)
5192 {
5652 and the mode for the result. */
5653#ifdef HAVE_insv
5654 if (in_dest)
5655 {
5193 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5656 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5194 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5195 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5196 }
5197#endif
5198
5199#ifdef HAVE_extzv
5200 if (! in_dest && unsignedp)
5201 {
5657 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5658 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5659 }
5660#endif
5661
5662#ifdef HAVE_extzv
5663 if (! in_dest && unsignedp)
5664 {
5202 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5665 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5203 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5204 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5205 }
5206#endif
5207
5208#ifdef HAVE_extv
5209 if (! in_dest && ! unsignedp)
5210 {
5666 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5667 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5668 }
5669#endif
5670
5671#ifdef HAVE_extv
5672 if (! in_dest && ! unsignedp)
5673 {
5211 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5674 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5212 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5213 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5214 }
5215#endif
5216
5217 /* Never narrow an object, since that might not be safe. */
5218
5219 if (mode != VOIDmode
5220 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5221 extraction_mode = mode;
5222
5223 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5224 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5225 pos_mode = GET_MODE (pos_rtx);
5226
5675 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5676 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5677 }
5678#endif
5679
5680 /* Never narrow an object, since that might not be safe. */
5681
5682 if (mode != VOIDmode
5683 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5684 extraction_mode = mode;
5685
5686 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5687 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5688 pos_mode = GET_MODE (pos_rtx);
5689
5227 /* If this is not from memory or we have to change the mode of memory and
5228 cannot, the desired mode is EXTRACTION_MODE. */
5229 if (GET_CODE (inner) != MEM
5230 || (inner_mode != wanted_mem_mode
5231 && (mode_dependent_address_p (XEXP (inner, 0))
5232 || MEM_VOLATILE_P (inner))))
5233 wanted_mem_mode = extraction_mode;
5690 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5691 if we have to change the mode of memory and cannot, the desired mode is
5692 EXTRACTION_MODE. */
5693 if (GET_CODE (inner) != MEM)
5694 wanted_inner_mode = wanted_inner_reg_mode;
5695 else if (inner_mode != wanted_inner_mode
5696 && (mode_dependent_address_p (XEXP (inner, 0))
5697 || MEM_VOLATILE_P (inner)))
5698 wanted_inner_mode = extraction_mode;
5234
5235 orig_pos = pos;
5236
5237 if (BITS_BIG_ENDIAN)
5238 {
5699
5700 orig_pos = pos;
5701
5702 if (BITS_BIG_ENDIAN)
5703 {
5239 /* If position is constant, compute new position. Otherwise,
5240 build subtraction. */
5704 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5705 BITS_BIG_ENDIAN style. If position is constant, compute new
5706 position. Otherwise, build subtraction.
5707 Note that POS is relative to the mode of the original argument.
5708 If it's a MEM we need to recompute POS relative to that.
5709 However, if we're extracting from (or inserting into) a register,
5710 we want to recompute POS relative to wanted_inner_mode. */
5711 int width = (GET_CODE (inner) == MEM
5712 ? GET_MODE_BITSIZE (is_mode)
5713 : GET_MODE_BITSIZE (wanted_inner_mode));
5714
5241 if (pos_rtx == 0)
5715 if (pos_rtx == 0)
5242 pos = (MAX (GET_MODE_BITSIZE (is_mode),
5243 GET_MODE_BITSIZE (wanted_mem_mode))
5244 - len - pos);
5716 pos = width - len - pos;
5245 else
5246 pos_rtx
5247 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5717 else
5718 pos_rtx
5719 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5248 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5249 GET_MODE_BITSIZE (wanted_mem_mode))
5250 - len),
5251 pos_rtx);
5720 GEN_INT (width - len), pos_rtx);
5721 /* POS may be less than 0 now, but we check for that below.
5722 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
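	 /* Worked example (hypothetical numbers): extracting an 8-bit
	    field at little-endian POS 0 from a 32-bit register becomes
	    POS = 32 - 8 - 0 = 24 in BITS_BIG_ENDIAN numbering.  */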
5252 }
5253
5254 /* If INNER has a wider mode, make it smaller. If this is a constant
5255 extract, try to adjust the byte to point to the byte containing
5256 the value. */
5723 }
5724
5725 /* If INNER has a wider mode, make it smaller. If this is a constant
5726 extract, try to adjust the byte to point to the byte containing
5727 the value. */
5257 if (wanted_mem_mode != VOIDmode
5258 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5728 if (wanted_inner_mode != VOIDmode
5729 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
5259 && ((GET_CODE (inner) == MEM
5730 && ((GET_CODE (inner) == MEM
5260 && (inner_mode == wanted_mem_mode
5731 && (inner_mode == wanted_inner_mode
5261 || (! mode_dependent_address_p (XEXP (inner, 0))
5262 && ! MEM_VOLATILE_P (inner))))))
5263 {
5264 int offset = 0;
5265
5266 /* The computations below will be correct if the machine is big
5267 endian in both bits and bytes or little endian in bits and bytes.
5268 If it is mixed, we must adjust. */
5269
5270 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5732 || (! mode_dependent_address_p (XEXP (inner, 0))
5733 && ! MEM_VOLATILE_P (inner))))))
5734 {
5735 int offset = 0;
5736
5737 /* The computations below will be correct if the machine is big
5738 endian in both bits and bytes or little endian in bits and bytes.
5739 If it is mixed, we must adjust. */
5740
5741 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5271 adjust OFFSET to compensate. */
5742 adjust OFFSET to compensate. */
5272 if (BYTES_BIG_ENDIAN
5273 && ! spans_byte
5274 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5275 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5276
5277 /* If this is a constant position, we can move to the desired byte. */
5278 if (pos_rtx == 0)
5279 {
5280 offset += pos / BITS_PER_UNIT;
5743 if (BYTES_BIG_ENDIAN
5744 && ! spans_byte
5745 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5746 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5747
5748 /* If this is a constant position, we can move to the desired byte. */
5749 if (pos_rtx == 0)
5750 {
5751 offset += pos / BITS_PER_UNIT;
5281 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5752 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
5282 }
5283
5284 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5285 && ! spans_byte
5753 }
5754
5755 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5756 && ! spans_byte
5286 && is_mode != wanted_mem_mode)
5757 && is_mode != wanted_inner_mode)
5287 offset = (GET_MODE_SIZE (is_mode)
5758 offset = (GET_MODE_SIZE (is_mode)
5288 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5759 - GET_MODE_SIZE (wanted_inner_mode) - offset);
5289
5760
5290 if (offset != 0 || inner_mode != wanted_mem_mode)
5761 if (offset != 0 || inner_mode != wanted_inner_mode)
5291 {
5762 {
5292 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5293 plus_constant (XEXP (inner, 0), offset));
5763 rtx newmem = gen_rtx_MEM (wanted_inner_mode,
5764 plus_constant (XEXP (inner, 0), offset));
5294 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5295 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5296 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5297 inner = newmem;
5298 }
5299 }
5300
5765 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5766 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5767 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5768 inner = newmem;
5769 }
5770 }
5771
5301 /* If INNER is not memory, we can always get it into the proper mode. */
5772 /* If INNER is not memory, we can always get it into the proper mode. If we
5773 are changing its mode, POS must be a constant and smaller than the size
5774 of the new mode. */
5302 else if (GET_CODE (inner) != MEM)
5775 else if (GET_CODE (inner) != MEM)
5303 inner = force_to_mode (inner, extraction_mode,
5304 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5305 ? GET_MODE_MASK (extraction_mode)
5306 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5307 NULL_RTX, 0);
5776 {
5777 if (GET_MODE (inner) != wanted_inner_mode
5778 && (pos_rtx != 0
5779 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5780 return 0;
5308
5781
5782 inner = force_to_mode (inner, wanted_inner_mode,
5783 pos_rtx
5784 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5785 ? GET_MODE_MASK (wanted_inner_mode)
5786 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5787 NULL_RTX, 0);
5788 }
5789
5309 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5310 have to zero extend. Otherwise, we can just use a SUBREG. */
5311 if (pos_rtx != 0
5312 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5313 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5314 else if (pos_rtx != 0
5315 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5316 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5317
5318 /* Make POS_RTX unless we already have it and it is correct. If we don't
5319 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5790 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5791 have to zero extend. Otherwise, we can just use a SUBREG. */
5792 if (pos_rtx != 0
5793 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5794 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
5795 else if (pos_rtx != 0
5796 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5797 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5798
5799 /* Make POS_RTX unless we already have it and it is correct. If we don't
5800 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5320 be a CONST_INT. */
5801 be a CONST_INT. */
5321 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5322 pos_rtx = orig_pos_rtx;
5323
5324 else if (pos_rtx == 0)
5325 pos_rtx = GEN_INT (pos);
5326
5327 /* Make the required operation. See if we can use existing rtx. */
5328 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5329 extraction_mode, inner, GEN_INT (len), pos_rtx);
5330 if (! in_dest)
5331 new = gen_lowpart_for_combine (mode, new);
5332
5333 return new;
5334}
5335
5336/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5337 with any other operations in X. Return X without that shift if so. */
5338
5339static rtx
5340extract_left_shift (x, count)
5341 rtx x;
5342 int count;
5343{
5344 enum rtx_code code = GET_CODE (x);
5345 enum machine_mode mode = GET_MODE (x);
5346 rtx tem;
5347
5348 switch (code)
5349 {
5350 case ASHIFT:
5351 /* This is the shift itself. If it is wide enough, we will return
5352 either the value being shifted if the shift count is equal to
5353 COUNT or a shift for the difference. */
5354 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5355 && INTVAL (XEXP (x, 1)) >= count)
5356 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5357 INTVAL (XEXP (x, 1)) - count);
5358 break;
5359
5360 case NEG: case NOT:
5361 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5362 return gen_unary (code, mode, mode, tem);
5363
5364 break;
5365
5366 case PLUS: case IOR: case XOR: case AND:
5367 /* If we can safely shift this constant and we find the inner shift,
5368 make a new operation. */
5369 if (GET_CODE (XEXP (x,1)) == CONST_INT
5802 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5803 pos_rtx = orig_pos_rtx;
5804
5805 else if (pos_rtx == 0)
5806 pos_rtx = GEN_INT (pos);
5807
5808 /* Make the required operation. See if we can use existing rtx. */
5809 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5810 extraction_mode, inner, GEN_INT (len), pos_rtx);
5811 if (! in_dest)
5812 new = gen_lowpart_for_combine (mode, new);
5813
5814 return new;
5815}
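
/* Illustration (a hypothetical sketch, not part of combine.c): the values
   that (zero_extract X LEN POS) and (sign_extract X LEN POS) denote,
   written as plain C on a host word, assuming POS counts from the
   low-order bit and 0 < LEN < HOST_BITS_PER_WIDE_INT.  */

static unsigned HOST_WIDE_INT
sketch_zero_extract (x, len, pos)
     unsigned HOST_WIDE_INT x;
     int len, pos;
{
  return (x >> pos) & (((unsigned HOST_WIDE_INT) 1 << len) - 1);
}

static HOST_WIDE_INT
sketch_sign_extract (x, len, pos)
     unsigned HOST_WIDE_INT x;
     int len, pos;
{
  unsigned HOST_WIDE_INT field
    = (x >> pos) & (((unsigned HOST_WIDE_INT) 1 << len) - 1);
  unsigned HOST_WIDE_INT sign = (unsigned HOST_WIDE_INT) 1 << (len - 1);

  /* If the top bit of the field is set, extend it with ones.  */
  return (HOST_WIDE_INT) ((field ^ sign) - sign);
}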
5816
5817/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5818 with any other operations in X. Return X without that shift if so. */
5819
5820static rtx
5821extract_left_shift (x, count)
5822 rtx x;
5823 int count;
5824{
5825 enum rtx_code code = GET_CODE (x);
5826 enum machine_mode mode = GET_MODE (x);
5827 rtx tem;
5828
5829 switch (code)
5830 {
5831 case ASHIFT:
5832 /* This is the shift itself. If it is wide enough, we will return
5833 either the value being shifted if the shift count is equal to
5834 COUNT or a shift for the difference. */
5835 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5836 && INTVAL (XEXP (x, 1)) >= count)
5837 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5838 INTVAL (XEXP (x, 1)) - count);
5839 break;
5840
5841 case NEG: case NOT:
5842 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5843 return gen_unary (code, mode, mode, tem);
5844
5845 break;
5846
5847 case PLUS: case IOR: case XOR: case AND:
5848 /* If we can safely shift this constant and we find the inner shift,
5849 make a new operation. */
5850 if (GET_CODE (XEXP (x,1)) == CONST_INT
5370 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count)) - 1) == 0
5851 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
5371 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5372 return gen_binary (code, mode, tem,
5373 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5374
5375 break;
5852 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5853 return gen_binary (code, mode, tem,
5854 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5855
5856 break;
5857
5858 default:
5859 break;
5376 }
5377
5378 return 0;
5379}
5380
5381/* Look at the expression rooted at X. Look for expressions
5382 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5383 Form these expressions.
5384
5385 Return the new rtx, usually just X.
5386
5387 Also, for machines like the Vax that don't have logical shift insns,
5388 try to convert logical to arithmetic shift operations in cases where
5389 they are equivalent. This undoes the canonicalizations to logical
5390 shifts done elsewhere.
5391
5392 We try, as much as possible, to re-use rtl expressions to save memory.
5393
5394 IN_CODE says what kind of expression we are processing. Normally, it is
5395	   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
5396 being kludges), it is MEM. When processing the arguments of a comparison
5397 or a COMPARE against zero, it is COMPARE. */
5398
5399static rtx
5400make_compound_operation (x, in_code)
5401 rtx x;
5402 enum rtx_code in_code;
5403{
5404 enum rtx_code code = GET_CODE (x);
5405 enum machine_mode mode = GET_MODE (x);
5406 int mode_width = GET_MODE_BITSIZE (mode);
5407 rtx rhs, lhs;
5408 enum rtx_code next_code;
5409 int i;
5410 rtx new = 0;
5411 rtx tem;
5412 char *fmt;
5413
5414 /* Select the code to be used in recursive calls. Once we are inside an
5415 address, we stay there. If we have a comparison, set to COMPARE,
5416 but once inside, go back to our default of SET. */
5417
5418 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5419 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5420 && XEXP (x, 1) == const0_rtx) ? COMPARE
5421 : in_code == COMPARE ? SET : in_code);
5422
5423 /* Process depending on the code of this operation. If NEW is set
5424 non-zero, it will be returned. */
5425
5426 switch (code)
5427 {
5428 case ASHIFT:
5429 /* Convert shifts by constants into multiplications if inside
5430 an address. */
5431 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5432 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5433 && INTVAL (XEXP (x, 1)) >= 0)
5434 {
5435 new = make_compound_operation (XEXP (x, 0), next_code);
5436 new = gen_rtx_combine (MULT, mode, new,
5437 GEN_INT ((HOST_WIDE_INT) 1
5438 << INTVAL (XEXP (x, 1))));
5439 }
5440 break;
5441
5442 case AND:
5443 /* If the second operand is not a constant, we can't do anything
5444 with it. */
5445 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5446 break;
5447
5448 /* If the constant is a power of two minus one and the first operand
5449 is a logical right shift, make an extraction. */
5450 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5451 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5452 {
5453 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5454 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5455 0, in_code == COMPARE);
5456 }
5457
5458 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5459 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5460 && subreg_lowpart_p (XEXP (x, 0))
5461 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5462 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5463 {
5464 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5465 next_code);
5466 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5467 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5468 0, in_code == COMPARE);
5469 }
5470 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5471 else if ((GET_CODE (XEXP (x, 0)) == XOR
5472 || GET_CODE (XEXP (x, 0)) == IOR)
5473 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5474 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5475 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5476 {
5477 /* Apply the distributive law, and then try to make extractions. */
5478 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5860 }
5861
5862 return 0;
5863}
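
/* Illustration (a hypothetical self-check, not part of combine.c): the
   rewrite above relies on the identity, for a constant K whose low COUNT
   bits are zero,

	(OP (ashift X COUNT) K)  ==  (ashift (OP X (K >> COUNT)) COUNT)

   for OP in PLUS, IOR, XOR and AND.  In plain C, with COUNT == 3 and
   K == 40 (low three bits zero, 40 >> 3 == 5), both forms agree for
   every operand:  */

static int
sketch_check_left_shift_commute (x)
     unsigned HOST_WIDE_INT x;
{
  return ((x << 3) + 40) == ((x + 5) << 3)
	 && ((x << 3) | 40) == ((x | 5) << 3);
}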
5864
5865/* Look at the expression rooted at X. Look for expressions
5866 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5867 Form these expressions.
5868
5869 Return the new rtx, usually just X.
5870
5871 Also, for machines like the Vax that don't have logical shift insns,
5872 try to convert logical to arithmetic shift operations in cases where
5873 they are equivalent. This undoes the canonicalizations to logical
5874 shifts done elsewhere.
5875
5876 We try, as much as possible, to re-use rtl expressions to save memory.
5877
5878 IN_CODE says what kind of expression we are processing. Normally, it is
5879	   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
5880 being kludges), it is MEM. When processing the arguments of a comparison
5881 or a COMPARE against zero, it is COMPARE. */
5882
5883static rtx
5884make_compound_operation (x, in_code)
5885 rtx x;
5886 enum rtx_code in_code;
5887{
5888 enum rtx_code code = GET_CODE (x);
5889 enum machine_mode mode = GET_MODE (x);
5890 int mode_width = GET_MODE_BITSIZE (mode);
5891 rtx rhs, lhs;
5892 enum rtx_code next_code;
5893 int i;
5894 rtx new = 0;
5895 rtx tem;
5896 char *fmt;
5897
5898 /* Select the code to be used in recursive calls. Once we are inside an
5899 address, we stay there. If we have a comparison, set to COMPARE,
5900 but once inside, go back to our default of SET. */
5901
5902 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5903 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5904 && XEXP (x, 1) == const0_rtx) ? COMPARE
5905 : in_code == COMPARE ? SET : in_code);
5906
5907 /* Process depending on the code of this operation. If NEW is set
5908 non-zero, it will be returned. */
5909
5910 switch (code)
5911 {
5912 case ASHIFT:
5913 /* Convert shifts by constants into multiplications if inside
5914 an address. */
5915 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5916 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5917 && INTVAL (XEXP (x, 1)) >= 0)
5918 {
5919 new = make_compound_operation (XEXP (x, 0), next_code);
5920 new = gen_rtx_combine (MULT, mode, new,
5921 GEN_INT ((HOST_WIDE_INT) 1
5922 << INTVAL (XEXP (x, 1))));
5923 }
5924 break;
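      /* E.g. an address containing (ashift X 2) is rewritten with
	 (mult X 4), the canonical form for a scaled index.  */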
5925
5926 case AND:
5927 /* If the second operand is not a constant, we can't do anything
5928 with it. */
5929 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5930 break;
5931
5932 /* If the constant is a power of two minus one and the first operand
5933 is a logical right shift, make an extraction. */
5934 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5935 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5936 {
5937 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5938 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5939 0, in_code == COMPARE);
5940 }
5941
5942 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5943 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5944 && subreg_lowpart_p (XEXP (x, 0))
5945 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5946 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5947 {
5948 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5949 next_code);
5950 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5951 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5952 0, in_code == COMPARE);
5953 }
5954 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
5955 else if ((GET_CODE (XEXP (x, 0)) == XOR
5956 || GET_CODE (XEXP (x, 0)) == IOR)
5957 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5958 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5959 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5960 {
5961 /* Apply the distributive law, and then try to make extractions. */
5962 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5479 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5480 XEXP (x, 1)),
5481 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5482 XEXP (x, 1)));
5963 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
5964 XEXP (x, 1)),
5965 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
5966 XEXP (x, 1)));
5483 new = make_compound_operation (new, in_code);
5484 }
5485
5486	     /* If we have (and (rotate X C) M) and C is larger than the number
5487 of bits in M, this is an extraction. */
5488
5489 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5490 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5491 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5492 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5493 {
5494 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5495 new = make_extraction (mode, new,
5496 (GET_MODE_BITSIZE (mode)
5497 - INTVAL (XEXP (XEXP (x, 0), 1))),
5498 NULL_RTX, i, 1, 0, in_code == COMPARE);
5499 }
5500
5501 /* On machines without logical shifts, if the operand of the AND is
5502 a logical shift and our mask turns off all the propagated sign
5503 bits, we can replace the logical shift with an arithmetic shift. */
5504 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5505 && (lshr_optab->handlers[(int) mode].insn_code
5506 == CODE_FOR_nothing)
5507 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5508 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5509 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5510 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5511 && mode_width <= HOST_BITS_PER_WIDE_INT)
5512 {
5513 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5514
5515 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5516 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5517 SUBST (XEXP (x, 0),
5518 gen_rtx_combine (ASHIFTRT, mode,
5519 make_compound_operation (XEXP (XEXP (x, 0), 0),
5520 next_code),
5521 XEXP (XEXP (x, 0), 1)));
5522 }
5523
5524 /* If the constant is one less than a power of two, this might be
5525 representable by an extraction even if no shift is present.
5526 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5527 we are in a COMPARE. */
5528 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5529 new = make_extraction (mode,
5530 make_compound_operation (XEXP (x, 0),
5531 next_code),
5532 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5533
5534 /* If we are in a comparison and this is an AND with a power of two,
5535 convert this into the appropriate bit extract. */
5536 else if (in_code == COMPARE
5537 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5538 new = make_extraction (mode,
5539 make_compound_operation (XEXP (x, 0),
5540 next_code),
5541 i, NULL_RTX, 1, 1, 0, 1);
5542
5543 break;
5544
5545 case LSHIFTRT:
5546 /* If the sign bit is known to be zero, replace this with an
5547 arithmetic shift. */
5548 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5549 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5550 && mode_width <= HOST_BITS_PER_WIDE_INT
5551 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
5552 {
5553 new = gen_rtx_combine (ASHIFTRT, mode,
5554 make_compound_operation (XEXP (x, 0),
5555 next_code),
5556 XEXP (x, 1));
5557 break;
5558 }
5559
5967 new = make_compound_operation (new, in_code);
5968 }
5969
5970	     /* If we have (and (rotate X C) M) and C is larger than the number
5971 of bits in M, this is an extraction. */
5972
5973 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5974 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5975 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5976 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5977 {
5978 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5979 new = make_extraction (mode, new,
5980 (GET_MODE_BITSIZE (mode)
5981 - INTVAL (XEXP (XEXP (x, 0), 1))),
5982 NULL_RTX, i, 1, 0, in_code == COMPARE);
5983 }
5984
5985 /* On machines without logical shifts, if the operand of the AND is
5986 a logical shift and our mask turns off all the propagated sign
5987 bits, we can replace the logical shift with an arithmetic shift. */
5988 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5989 && (lshr_optab->handlers[(int) mode].insn_code
5990 == CODE_FOR_nothing)
5991 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5992 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5993 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5994 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5995 && mode_width <= HOST_BITS_PER_WIDE_INT)
5996 {
5997 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5998
5999 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6000 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6001 SUBST (XEXP (x, 0),
6002 gen_rtx_combine (ASHIFTRT, mode,
6003 make_compound_operation (XEXP (XEXP (x, 0), 0),
6004 next_code),
6005 XEXP (XEXP (x, 0), 1)));
6006 }
6007
6008 /* If the constant is one less than a power of two, this might be
6009 representable by an extraction even if no shift is present.
6010 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6011 we are in a COMPARE. */
6012 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6013 new = make_extraction (mode,
6014 make_compound_operation (XEXP (x, 0),
6015 next_code),
6016 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6017
6018 /* If we are in a comparison and this is an AND with a power of two,
6019 convert this into the appropriate bit extract. */
6020 else if (in_code == COMPARE
6021 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6022 new = make_extraction (mode,
6023 make_compound_operation (XEXP (x, 0),
6024 next_code),
6025 i, NULL_RTX, 1, 1, 0, 1);
6026
6027 break;
6028
6029 case LSHIFTRT:
6030 /* If the sign bit is known to be zero, replace this with an
6031 arithmetic shift. */
6032 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6033 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6034 && mode_width <= HOST_BITS_PER_WIDE_INT
6035 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
6036 {
6037 new = gen_rtx_combine (ASHIFTRT, mode,
6038 make_compound_operation (XEXP (x, 0),
6039 next_code),
6040 XEXP (x, 1));
6041 break;
6042 }
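      /* A logical and an arithmetic right shift agree whenever the sign
	 bit of the operand is zero; e.g. for the 8-bit value 0x7f, both
	 kinds of shift by 1 yield 0x3f, so the replacement is safe.  */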
6043
5560 /* ... fall through ... */
6044 /* ... fall through ... */
5561
5562 case ASHIFTRT:
5563 lhs = XEXP (x, 0);
5564 rhs = XEXP (x, 1);
5565
5566 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5567 this is a SIGN_EXTRACT. */
5568 if (GET_CODE (rhs) == CONST_INT
5569 && GET_CODE (lhs) == ASHIFT
5570 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5571 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
5572 {
5573 new = make_compound_operation (XEXP (lhs, 0), next_code);
5574 new = make_extraction (mode, new,
5575 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5576 NULL_RTX, mode_width - INTVAL (rhs),
5577 code == LSHIFTRT, 0, in_code == COMPARE);
5578 }
5579
5580 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5581 If so, try to merge the shifts into a SIGN_EXTEND. We could
5582 also do this for some cases of SIGN_EXTRACT, but it doesn't
5583 seem worth the effort; the case checked for occurs on Alpha. */
5584
5585 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5586 && ! (GET_CODE (lhs) == SUBREG
5587 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5588 && GET_CODE (rhs) == CONST_INT
5589 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5590 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5591 new = make_extraction (mode, make_compound_operation (new, next_code),
5592 0, NULL_RTX, mode_width - INTVAL (rhs),
5593 code == LSHIFTRT, 0, in_code == COMPARE);
5594
5595 break;
5596
5597 case SUBREG:
5598 /* Call ourselves recursively on the inner expression. If we are
5599 narrowing the object and it has a different RTL code from
5600 what it originally did, do this SUBREG as a force_to_mode. */
5601
5602 tem = make_compound_operation (SUBREG_REG (x), in_code);
5603 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5604 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5605 && subreg_lowpart_p (x))
5606 {
5607 rtx newer = force_to_mode (tem, mode,
5608 GET_MODE_MASK (mode), NULL_RTX, 0);
5609
5610 /* If we have something other than a SUBREG, we might have
5611	     done an expansion, so rerun ourselves.  */
5612 if (GET_CODE (newer) != SUBREG)
5613 newer = make_compound_operation (newer, in_code);
5614
5615 return newer;
5616 }
6045
6046 case ASHIFTRT:
6047 lhs = XEXP (x, 0);
6048 rhs = XEXP (x, 1);
6049
6050 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6051 this is a SIGN_EXTRACT. */
6052 if (GET_CODE (rhs) == CONST_INT
6053 && GET_CODE (lhs) == ASHIFT
6054 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6055 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
6056 {
6057 new = make_compound_operation (XEXP (lhs, 0), next_code);
6058 new = make_extraction (mode, new,
6059 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6060 NULL_RTX, mode_width - INTVAL (rhs),
6061 code == LSHIFTRT, 0, in_code == COMPARE);
6062 }
6063
6064 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6065 If so, try to merge the shifts into a SIGN_EXTEND. We could
6066 also do this for some cases of SIGN_EXTRACT, but it doesn't
6067 seem worth the effort; the case checked for occurs on Alpha. */
6068
6069 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6070 && ! (GET_CODE (lhs) == SUBREG
6071 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6072 && GET_CODE (rhs) == CONST_INT
6073 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6074 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6075 new = make_extraction (mode, make_compound_operation (new, next_code),
6076 0, NULL_RTX, mode_width - INTVAL (rhs),
6077 code == LSHIFTRT, 0, in_code == COMPARE);
6078
6079 break;
6080
6081 case SUBREG:
6082 /* Call ourselves recursively on the inner expression. If we are
6083 narrowing the object and it has a different RTL code from
6084 what it originally did, do this SUBREG as a force_to_mode. */
6085
6086 tem = make_compound_operation (SUBREG_REG (x), in_code);
6087 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6088 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6089 && subreg_lowpart_p (x))
6090 {
6091 rtx newer = force_to_mode (tem, mode,
6092 GET_MODE_MASK (mode), NULL_RTX, 0);
6093
6094 /* If we have something other than a SUBREG, we might have
6095	     done an expansion, so rerun ourselves.  */
6096 if (GET_CODE (newer) != SUBREG)
6097 newer = make_compound_operation (newer, in_code);
6098
6099 return newer;
6100 }
6101
6102 /* If this is a paradoxical subreg, and the new code is a sign or
6103 zero extension, omit the subreg and widen the extension. If it
6104 is a regular subreg, we can still get rid of the subreg by not
6105 widening so much, or in fact removing the extension entirely. */
6106 if ((GET_CODE (tem) == SIGN_EXTEND
6107 || GET_CODE (tem) == ZERO_EXTEND)
6108 && subreg_lowpart_p (x))
6109 {
6110 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6111 || (GET_MODE_SIZE (mode) >
6112 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6113 tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6114 else
6115 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6116 return tem;
6117 }
6118 break;
6119
6120 default:
6121 break;
5617 }
5618
5619 if (new)
5620 {
5621 x = gen_lowpart_for_combine (mode, new);
5622 code = GET_CODE (x);
5623 }
5624
5625 /* Now recursively process each operand of this operation. */
5626 fmt = GET_RTX_FORMAT (code);
5627 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5628 if (fmt[i] == 'e')
5629 {
5630 new = make_compound_operation (XEXP (x, i), next_code);
5631 SUBST (XEXP (x, i), new);
5632 }
5633
5634 return x;
5635}
5636
5637/* Given M, see if it is a value that would select a field of bits
5638 within an item, but not the entire word. Return -1 if not.
5639 Otherwise, return the starting position of the field, where 0 is the
5640 low-order bit.
5641
5642 *PLEN is set to the length of the field. */
5643
5644static int
5645get_pos_from_mask (m, plen)
5646 unsigned HOST_WIDE_INT m;
5647 int *plen;
5648{
5649 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5650 int pos = exact_log2 (m & - m);
5651
5652 if (pos < 0)
5653 return -1;
5654
5655 /* Now shift off the low-order zero bits and see if we have a power of
5656 two minus 1. */
5657 *plen = exact_log2 ((m >> pos) + 1);
5658
5659 if (*plen <= 0)
5660 return -1;
5661
5662 return pos;
5663}
5664
5665/* See if X can be simplified knowing that we will only refer to it in
5666 MODE and will only refer to those bits that are nonzero in MASK.
5667 If other bits are being computed or if masking operations are done
5668 that select a superset of the bits in MASK, they can sometimes be
5669 ignored.
5670
5671 Return a possibly simplified expression, but always convert X to
5672 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
5673
5674 Also, if REG is non-zero and X is a register equal in value to REG,
5675 replace X with REG.
5676
5677 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5678 are all off in X. This is used when X will be complemented, by either
5679 NOT, NEG, or XOR. */
5680
5681static rtx
5682force_to_mode (x, mode, mask, reg, just_select)
5683 rtx x;
5684 enum machine_mode mode;
5685 unsigned HOST_WIDE_INT mask;
5686 rtx reg;
5687 int just_select;
5688{
5689 enum rtx_code code = GET_CODE (x);
5690 int next_select = just_select || code == XOR || code == NOT || code == NEG;
5691 enum machine_mode op_mode;
5692 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5693 rtx op0, op1, temp;
5694
6122 }
6123
6124 if (new)
6125 {
6126 x = gen_lowpart_for_combine (mode, new);
6127 code = GET_CODE (x);
6128 }
6129
6130 /* Now recursively process each operand of this operation. */
6131 fmt = GET_RTX_FORMAT (code);
6132 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6133 if (fmt[i] == 'e')
6134 {
6135 new = make_compound_operation (XEXP (x, i), next_code);
6136 SUBST (XEXP (x, i), new);
6137 }
6138
6139 return x;
6140}
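
/* Illustration (a hypothetical self-check, not part of combine.c): the
   AND case above turns (and (lshiftrt X 8) 255) into an 8-bit
   ZERO_EXTRACT at position 8.  In plain C the two computations agree,
   viewing the extraction as "shift the field to the top, then logically
   shift it back down":  */

static int
sketch_check_and_shift_is_extract (x)
     unsigned HOST_WIDE_INT x;
{
  return ((x >> 8) & 0xff)
	 == ((x << (HOST_BITS_PER_WIDE_INT - 16))
	     >> (HOST_BITS_PER_WIDE_INT - 8));
}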
6141
6142/* Given M, see if it is a value that would select a field of bits
6143 within an item, but not the entire word. Return -1 if not.
6144 Otherwise, return the starting position of the field, where 0 is the
6145 low-order bit.
6146
6147 *PLEN is set to the length of the field. */
6148
6149static int
6150get_pos_from_mask (m, plen)
6151 unsigned HOST_WIDE_INT m;
6152 int *plen;
6153{
6154 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6155 int pos = exact_log2 (m & - m);
6156
6157 if (pos < 0)
6158 return -1;
6159
6160 /* Now shift off the low-order zero bits and see if we have a power of
6161 two minus 1. */
6162 *plen = exact_log2 ((m >> pos) + 1);
6163
6164 if (*plen <= 0)
6165 return -1;
6166
6167 return pos;
6168}
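
/* Illustration (hypothetical calls, not part of combine.c):

	get_pos_from_mask (0xf0, &len)   returns 4, len == 4
	get_pos_from_mask (0x01, &len)   returns 0, len == 1
	get_pos_from_mask (0xf1, &len)   returns -1

   since 0xf0 selects a contiguous 4-bit field starting at bit 4, while
   the one bits of 0xf1 are not contiguous and select no single field.  */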
6169
6170/* See if X can be simplified knowing that we will only refer to it in
6171 MODE and will only refer to those bits that are nonzero in MASK.
6172 If other bits are being computed or if masking operations are done
6173 that select a superset of the bits in MASK, they can sometimes be
6174 ignored.
6175
6176 Return a possibly simplified expression, but always convert X to
6177 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
6178
6179 Also, if REG is non-zero and X is a register equal in value to REG,
6180 replace X with REG.
6181
6182 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6183 are all off in X. This is used when X will be complemented, by either
6184 NOT, NEG, or XOR. */
6185
6186static rtx
6187force_to_mode (x, mode, mask, reg, just_select)
6188 rtx x;
6189 enum machine_mode mode;
6190 unsigned HOST_WIDE_INT mask;
6191 rtx reg;
6192 int just_select;
6193{
6194 enum rtx_code code = GET_CODE (x);
6195 int next_select = just_select || code == XOR || code == NOT || code == NEG;
6196 enum machine_mode op_mode;
6197 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6198 rtx op0, op1, temp;
6199
5695 /* If this is a CALL, don't do anything. Some of the code below
5696 will do the wrong thing since the mode of a CALL is VOIDmode. */
5697 if (code == CALL)
6200 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6201 code below will do the wrong thing since the mode of such an
6202 expression is VOIDmode.
6203
6204 Also do nothing if X is a CLOBBER; this can happen if X was
6205 the return value from a call to gen_lowpart_for_combine. */
6206 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
5698 return x;
5699
5700	  /* We want to perform the operation in its present mode unless we know
5701 that the operation is valid in MODE, in which case we do the operation
5702 in MODE. */
5703 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5704 && code_to_optab[(int) code] != 0
5705 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5706 != CODE_FOR_nothing))
5707 ? mode : GET_MODE (x));
5708
5709 /* It is not valid to do a right-shift in a narrower mode
5710 than the one it came in with. */
5711 if ((code == LSHIFTRT || code == ASHIFTRT)
5712 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5713 op_mode = GET_MODE (x);
5714
5715 /* Truncate MASK to fit OP_MODE. */
5716 if (op_mode)
5717 mask &= GET_MODE_MASK (op_mode);
5718
5719 /* When we have an arithmetic operation, or a shift whose count we
5720	     do not know, we need to assume that all bits up to the highest-order
5721 bit in MASK will be needed. This is how we form such a mask. */
5722 if (op_mode)
5723 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5724 ? GET_MODE_MASK (op_mode)
5725 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5726 else
5727 fuller_mask = ~ (HOST_WIDE_INT) 0;
5728
5729 /* Determine what bits of X are guaranteed to be (non)zero. */
5730 nonzero = nonzero_bits (x, mode);
5731
5732 /* If none of the bits in X are needed, return a zero. */
5733 if (! just_select && (nonzero & mask) == 0)
5734 return const0_rtx;
5735
5736 /* If X is a CONST_INT, return a new one. Do this here since the
5737 test below will fail. */
5738 if (GET_CODE (x) == CONST_INT)
5739 {
5740 HOST_WIDE_INT cval = INTVAL (x) & mask;
5741 int width = GET_MODE_BITSIZE (mode);
5742
5743	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5744 number, sign extend it. */
5745 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5746 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5747 cval |= (HOST_WIDE_INT) -1 << width;
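      /* E.g. (hypothetical): for QImode, width == 8, so a CVAL of 0xff
	 has bit 7 set and becomes (HOST_WIDE_INT) -1, the sign-extended
	 form expected of a CONST_INT in a narrow mode.  */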
5748
5749 return GEN_INT (cval);
5750 }
5751
5752 /* If X is narrower than MODE and we want all the bits in X's mode, just
5753 get X in the proper mode. */
5754 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5755 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
5756 return gen_lowpart_for_combine (mode, x);
5757
5758 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
5759 MASK are already known to be zero in X, we need not do anything. */
5760 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
5761 return x;
5762
5763 switch (code)
5764 {
5765 case CLOBBER:
5766 /* If X is a (clobber (const_int)), return it since we know we are
6207 return x;
6208
6209	  /* We want to perform the operation in its present mode unless we know
6210 that the operation is valid in MODE, in which case we do the operation
6211 in MODE. */
6212 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6213 && code_to_optab[(int) code] != 0
6214 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6215 != CODE_FOR_nothing))
6216 ? mode : GET_MODE (x));
6217
6218 /* It is not valid to do a right-shift in a narrower mode
6219 than the one it came in with. */
6220 if ((code == LSHIFTRT || code == ASHIFTRT)
6221 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6222 op_mode = GET_MODE (x);
6223
6224 /* Truncate MASK to fit OP_MODE. */
6225 if (op_mode)
6226 mask &= GET_MODE_MASK (op_mode);
6227
6228 /* When we have an arithmetic operation, or a shift whose count we
6229	     do not know, we need to assume that all bits up to the highest-order
6230 bit in MASK will be needed. This is how we form such a mask. */
6231 if (op_mode)
6232 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6233 ? GET_MODE_MASK (op_mode)
6234 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6235 else
6236 fuller_mask = ~ (HOST_WIDE_INT) 0;
6237
6238 /* Determine what bits of X are guaranteed to be (non)zero. */
6239 nonzero = nonzero_bits (x, mode);
6240
6241 /* If none of the bits in X are needed, return a zero. */
6242 if (! just_select && (nonzero & mask) == 0)
6243 return const0_rtx;
6244
6245 /* If X is a CONST_INT, return a new one. Do this here since the
6246 test below will fail. */
6247 if (GET_CODE (x) == CONST_INT)
6248 {
6249 HOST_WIDE_INT cval = INTVAL (x) & mask;
6250 int width = GET_MODE_BITSIZE (mode);
6251
6252	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
6253 number, sign extend it. */
6254 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6255 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6256 cval |= (HOST_WIDE_INT) -1 << width;
6257
6258 return GEN_INT (cval);
6259 }
6260
6261 /* If X is narrower than MODE and we want all the bits in X's mode, just
6262 get X in the proper mode. */
6263 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6264 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
6265 return gen_lowpart_for_combine (mode, x);
6266
6267 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6268 MASK are already known to be zero in X, we need not do anything. */
6269 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6270 return x;
6271
6272 switch (code)
6273 {
6274 case CLOBBER:
6275 /* If X is a (clobber (const_int)), return it since we know we are
5767 generating something that won't match. */
6276 generating something that won't match. */
5768 return x;
5769
5770 case USE:
5771 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5772 spanned the boundary of the MEM. If we are now masking so it is
5773 within that boundary, we don't need the USE any more. */
5774 if (! BITS_BIG_ENDIAN
5775 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5776 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5777 break;
5778
5779 case SIGN_EXTEND:
5780 case ZERO_EXTEND:
5781 case ZERO_EXTRACT:
5782 case SIGN_EXTRACT:
5783 x = expand_compound_operation (x);
5784 if (GET_CODE (x) != code)
5785 return force_to_mode (x, mode, mask, reg, next_select);
5786 break;
5787
5788 case REG:
5789 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5790 || rtx_equal_p (reg, get_last_value (x))))
5791 x = reg;
5792 break;
5793
5794 case SUBREG:
5795 if (subreg_lowpart_p (x)
5796 /* We can ignore the effect of this SUBREG if it narrows the mode or
5797 if the constant masks to zero all the bits the mode doesn't
5798 have. */
5799 && ((GET_MODE_SIZE (GET_MODE (x))
5800 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5801 || (0 == (mask
5802 & GET_MODE_MASK (GET_MODE (x))
5803 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
5804 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
5805 break;
5806
5807 case AND:
5808 /* If this is an AND with a constant, convert it into an AND
5809 whose constant is the AND of that constant with MASK. If it
5810 remains an AND of MASK, delete it since it is redundant. */
5811
5812 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5813 {
5814 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5815 mask & INTVAL (XEXP (x, 1)));
5816
5817 /* If X is still an AND, see if it is an AND with a mask that
5818 is just some low-order bits. If so, and it is MASK, we don't
5819 need it. */
5820
5821 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5822 && INTVAL (XEXP (x, 1)) == mask)
5823 x = XEXP (x, 0);
5824
5825 /* If it remains an AND, try making another AND with the bits
5826 in the mode mask that aren't in MASK turned on. If the
5827 constant in the AND is wide enough, this might make a
5828 cheaper constant. */
5829
5830 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5831 && GET_MODE_MASK (GET_MODE (x)) != mask
5832 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
5833 {
5834 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
5835 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
5836 int width = GET_MODE_BITSIZE (GET_MODE (x));
5837 rtx y;
5838
5839	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5840 number, sign extend it. */
5841 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5842 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5843 cval |= (HOST_WIDE_INT) -1 << width;
5844
5845 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
5846 if (rtx_cost (y, SET) < rtx_cost (x, SET))
5847 x = y;
5848 }
5849
5850 break;
5851 }
5852
5853 goto binop;
5854
5855 case PLUS:
5856 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5857 low-order bits (as in an alignment operation) and FOO is already
5858 aligned to that boundary, mask C1 to that boundary as well.
5859 This may eliminate that PLUS and, later, the AND. */
5860
5861 {
5862 int width = GET_MODE_BITSIZE (mode);
5863 unsigned HOST_WIDE_INT smask = mask;
5864
5865 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
5866 number, sign extend it. */
5867
5868 if (width < HOST_BITS_PER_WIDE_INT
5869 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5870 smask |= (HOST_WIDE_INT) -1 << width;
5871
5872 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6277 return x;
6278
6279 case USE:
6280 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6281 spanned the boundary of the MEM. If we are now masking so it is
6282 within that boundary, we don't need the USE any more. */
6283 if (! BITS_BIG_ENDIAN
6284 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
6285 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6286 break;
6287
6288 case SIGN_EXTEND:
6289 case ZERO_EXTEND:
6290 case ZERO_EXTRACT:
6291 case SIGN_EXTRACT:
6292 x = expand_compound_operation (x);
6293 if (GET_CODE (x) != code)
6294 return force_to_mode (x, mode, mask, reg, next_select);
6295 break;
6296
6297 case REG:
6298 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6299 || rtx_equal_p (reg, get_last_value (x))))
6300 x = reg;
6301 break;
6302
6303 case SUBREG:
6304 if (subreg_lowpart_p (x)
6305 /* We can ignore the effect of this SUBREG if it narrows the mode or
6306 if the constant masks to zero all the bits the mode doesn't
6307 have. */
6308 && ((GET_MODE_SIZE (GET_MODE (x))
6309 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6310 || (0 == (mask
6311 & GET_MODE_MASK (GET_MODE (x))
6312 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
6313 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
6314 break;
6315
6316 case AND:
6317 /* If this is an AND with a constant, convert it into an AND
6318 whose constant is the AND of that constant with MASK. If it
6319 remains an AND of MASK, delete it since it is redundant. */
6320
6321 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6322 {
6323 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6324 mask & INTVAL (XEXP (x, 1)));
6325
6326 /* If X is still an AND, see if it is an AND with a mask that
6327 is just some low-order bits. If so, and it is MASK, we don't
6328 need it. */
6329
6330 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6331 && INTVAL (XEXP (x, 1)) == mask)
6332 x = XEXP (x, 0);
6333
6334 /* If it remains an AND, try making another AND with the bits
6335 in the mode mask that aren't in MASK turned on. If the
6336 constant in the AND is wide enough, this might make a
6337 cheaper constant. */
6338
6339 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6340 && GET_MODE_MASK (GET_MODE (x)) != mask
6341 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
6342 {
6343 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6344 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6345 int width = GET_MODE_BITSIZE (GET_MODE (x));
6346 rtx y;
6347
6348 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6349 number, sign extend it. */
6350 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6351 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6352 cval |= (HOST_WIDE_INT) -1 << width;
6353
6354 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6355 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6356 x = y;
6357 }
6358
6359 break;
6360 }
6361
6362 goto binop;
6363
6364 case PLUS:
6365 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6366 low-order bits (as in an alignment operation) and FOO is already
6367 aligned to that boundary, mask C1 to that boundary as well.
6368 This may eliminate that PLUS and, later, the AND. */
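      /* Worked example (hypothetical numbers): if FOO is known to be
	 16-byte aligned, (and (plus FOO 18) -16) equals
	 (and (plus FOO 16) -16), since FOO contributes nothing below
	 bit 4; C1 is masked from 18 down to 16.  */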
6369
6370 {
6371 int width = GET_MODE_BITSIZE (mode);
6372 unsigned HOST_WIDE_INT smask = mask;
6373
6374 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6375 number, sign extend it. */
6376
6377 if (width < HOST_BITS_PER_WIDE_INT
6378 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6379 smask |= (HOST_WIDE_INT) -1 << width;
6380
6381 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5873 && exact_log2 (- smask) >= 0
5874 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5875 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5876 return force_to_mode (plus_constant (XEXP (x, 0),
5877 INTVAL (XEXP (x, 1)) & mask),
5878 mode, mask, reg, next_select);
6382 && exact_log2 (- smask) >= 0)
6383 {
6384#ifdef STACK_BIAS
6385 if (STACK_BIAS
6386 && (XEXP (x, 0) == stack_pointer_rtx
6387 || XEXP (x, 0) == frame_pointer_rtx))
6388 {
6389 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6390 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6391
6392 sp_mask &= ~ (sp_alignment - 1);
6393 if ((sp_mask & ~ mask) == 0
6394 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ mask) != 0)
6395 return force_to_mode (plus_constant (XEXP (x, 0),
6396 ((INTVAL (XEXP (x, 1)) -
6397 STACK_BIAS) & mask)
6398 + STACK_BIAS),
6399 mode, mask, reg, next_select);
6400 }
6401#endif
6402 if ((nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
6403 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
6404 return force_to_mode (plus_constant (XEXP (x, 0),
6405 INTVAL (XEXP (x, 1)) & mask),
6406 mode, mask, reg, next_select);
6407 }
5879 }
5880
6408 }
6409
5881 /* ... fall through ... */
6410 /* ... fall through ... */
5882
5883 case MINUS:
5884 case MULT:
5885 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5886 most significant bit in MASK since carries from those bits will
5887 affect the bits we are interested in. */
5888 mask = fuller_mask;
5889 goto binop;
5890
5891 case IOR:
5892 case XOR:
5893 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5894 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5895 operation which may be a bitfield extraction. Ensure that the
5896 constant we form is not wider than the mode of X. */
5897
5898 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5899 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5900 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5901 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5902 && GET_CODE (XEXP (x, 1)) == CONST_INT
5903 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5904 + floor_log2 (INTVAL (XEXP (x, 1))))
5905 < GET_MODE_BITSIZE (GET_MODE (x)))
5906 && (INTVAL (XEXP (x, 1))
6411
6412 case MINUS:
6413 case MULT:
6414 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6415 most significant bit in MASK since carries from those bits will
6416 affect the bits we are interested in. */
6417 mask = fuller_mask;
6418 goto binop;
6419
6420 case IOR:
6421 case XOR:
6422 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6423 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6424 operation which may be a bitfield extraction. Ensure that the
6425 constant we form is not wider than the mode of X. */
6426
6427 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6428 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6429 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6430 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6431 && GET_CODE (XEXP (x, 1)) == CONST_INT
6432 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6433 + floor_log2 (INTVAL (XEXP (x, 1))))
6434 < GET_MODE_BITSIZE (GET_MODE (x)))
6435 && (INTVAL (XEXP (x, 1))
5907 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x)) == 0))
6436 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
5908 {
5909 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5910 << INTVAL (XEXP (XEXP (x, 0), 1)));
5911 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5912 XEXP (XEXP (x, 0), 0), temp);
6437 {
6438 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6439 << INTVAL (XEXP (XEXP (x, 0), 1)));
6440 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6441 XEXP (XEXP (x, 0), 0), temp);
5913 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
6442 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6443 XEXP (XEXP (x, 0), 1));
5914 return force_to_mode (x, mode, mask, reg, next_select);
5915 }
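	  /* E.g. (hypothetical): (ior (lshiftrt FOO 8) 255) becomes
	     (lshiftrt (ior FOO 65280) 8); the constant moves up under
	     the IOR since 255 << 8 == 65280 still fits in the mode.  */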
5916
5917 binop:
5918 /* For most binary operations, just propagate into the operation and
5919 change the mode if we have an operation of that mode. */
5920
5921 op0 = gen_lowpart_for_combine (op_mode,
5922 force_to_mode (XEXP (x, 0), mode, mask,
5923 reg, next_select));
5924 op1 = gen_lowpart_for_combine (op_mode,
5925 force_to_mode (XEXP (x, 1), mode, mask,
5926 reg, next_select));
5927
5928 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5929 MASK since OP1 might have been sign-extended but we never want
5930 to turn on extra bits, since combine might have previously relied
5931 on them being off. */
5932 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5933 && (INTVAL (op1) & mask) != 0)
5934 op1 = GEN_INT (INTVAL (op1) & mask);
5935
5936 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5937 x = gen_binary (code, op_mode, op0, op1);
5938 break;
5939
5940 case ASHIFT:
5941 /* For left shifts, do the same, but just for the first operand.
5942 However, we cannot do anything with shifts where we cannot
5943 guarantee that the counts are smaller than the size of the mode
5944 because such a count will have a different meaning in a
5945 wider mode. */
5946
5947 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5948 && INTVAL (XEXP (x, 1)) >= 0
5949 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5950 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5951 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5952 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5953 break;
5954
5955 /* If the shift count is a constant and we can do arithmetic in
5956 the mode of the shift, refine which bits we need. Otherwise, use the
5957 conservative form of the mask. */
5958 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5959 && INTVAL (XEXP (x, 1)) >= 0
5960 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5961 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5962 mask >>= INTVAL (XEXP (x, 1));
5963 else
5964 mask = fuller_mask;
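      /* E.g. if only the low byte of (ashift X 4) is needed (MASK 0xff),
	 only bits 0-3 of X can reach it, so X is simplified against
	 MASK >> 4 == 0x0f.  */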
5965
5966 op0 = gen_lowpart_for_combine (op_mode,
5967 force_to_mode (XEXP (x, 0), op_mode,
5968 mask, reg, next_select));
5969
5970 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5971 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
5972 break;
5973
5974 case LSHIFTRT:
5975 /* Here we can only do something if the shift count is a constant,
5976 this shift constant is valid for the host, and we can do arithmetic
5977 in OP_MODE. */
5978
5979 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5980 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5981 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5982 {
5983 rtx inner = XEXP (x, 0);
5984
5985 /* Select the mask of the bits we need for the shift operand. */
5986 mask <<= INTVAL (XEXP (x, 1));
5987
5988 /* We can only change the mode of the shift if we can do arithmetic
5989 in the mode of the shift and MASK is no wider than the width of
5990 OP_MODE. */
5991 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5992 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5993 op_mode = GET_MODE (x);
5994
5995 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
5996
5997 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5998 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
5999 }
6000
6001 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6002 shift and AND produces only copies of the sign bit (C2 is one less
6003 than a power of two), we can do this with just a shift. */
6004
6005 if (GET_CODE (x) == LSHIFTRT
6006 && GET_CODE (XEXP (x, 1)) == CONST_INT
6007 && ((INTVAL (XEXP (x, 1))
6008 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6009 >= GET_MODE_BITSIZE (GET_MODE (x)))
6010 && exact_log2 (mask + 1) >= 0
6011 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6012 >= exact_log2 (mask + 1)))
6013 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6014 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6015 - exact_log2 (mask + 1)));
6016 break;
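      /* Editorial sketch (not part of the original diff): assume SImode
	 and an X with at least 8 sign-bit copies, say
	 X == (sign_extend:SI (reg:QI r)).  Then in
	 (and (lshiftrt X (const_int 24)) (const_int 255))
	 every bit the shift produces is a copy of the sign bit, 255 is
	 one less than a power of two, and the AND adds nothing: the
	 expression reduces to (lshiftrt X (const_int 24)).  */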
6017
6018 case ASHIFTRT:
6019 /* If we are just looking for the sign bit, we don't need this shift at
6020 all, even if it has a variable count. */
6021 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6022 && (mask == ((HOST_WIDE_INT) 1
6023 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6024 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
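      /* Editorial sketch (not part of the original diff): an arithmetic
	 right shift fills the vacated positions with copies of the sign
	 bit but never disturbs the sign bit itself, so with SImode and
	 MASK == 0x80000000, bit 31 of (ashiftrt X N) equals bit 31 of X
	 for any valid count N, and the shift can be dropped.  */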
6025
6026 /* If this is a shift by a constant, get a mask that contains those bits
6027 that are not copies of the sign bit. We then have two cases: If
6028 MASK only includes those bits, this can be a logical shift, which may
6029 allow simplifications. If MASK is a single-bit field not within
6030 those bits, we are requesting a copy of the sign bit and hence can
6031 shift the sign bit to the appropriate location. */
6032
6033 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6034 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6035 {
6036 int i = -1;
6037
6038	      /* If the considered data is wider than HOST_WIDE_INT, we can't
6039 represent a mask for all its bits in a single scalar.
6040 But we only care about the lower bits, so calculate these. */
6041
6042 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
6043 {
6044 nonzero = ~(HOST_WIDE_INT)0;
6574 nonzero = ~ (HOST_WIDE_INT) 0;
6045
6046 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6047 is the number of bits a full-width mask would have set.
6048 We need only shift if these are fewer than nonzero can
6049 hold. If not, we must keep all bits set in nonzero. */
6050
6051 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6052 < HOST_BITS_PER_WIDE_INT)
6053 nonzero >>= INTVAL (XEXP (x, 1))
6054 + HOST_BITS_PER_WIDE_INT
6055			     - GET_MODE_BITSIZE (GET_MODE (x));
6056 }
6057 else
6058 {
6059 nonzero = GET_MODE_MASK (GET_MODE (x));
6060 nonzero >>= INTVAL (XEXP (x, 1));
6061 }
6062
6063 if ((mask & ~ nonzero) == 0
6064 || (i = exact_log2 (mask)) >= 0)
6065 {
6066 x = simplify_shift_const
6067 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6068 i < 0 ? INTVAL (XEXP (x, 1))
6069 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6070
6071 if (GET_CODE (x) != ASHIFTRT)
6072 return force_to_mode (x, mode, mask, reg, next_select);
6073 }
6074 }
6075
6076 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
6077 even if the shift count isn't a constant. */
6078 if (mask == 1)
6079 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
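	  /* Editorial sketch (not part of the original diff): bit 0 of
	     (ashiftrt X C) and of (lshiftrt X C) both come from bit C of
	     X; the two codes differ only in the bits shifted in at the
	     top.  Under MASK == 1 the logical form is therefore safe even
	     when the count is not a constant.  */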
6080
6081 /* If this is a sign-extension operation that just affects bits
6082 we don't care about, remove it. Be sure the call above returned
6083 something that is still a shift. */
6084
6085 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6086 && GET_CODE (XEXP (x, 1)) == CONST_INT
6087 && INTVAL (XEXP (x, 1)) >= 0
6088 && (INTVAL (XEXP (x, 1))
6089 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
6090 && GET_CODE (XEXP (x, 0)) == ASHIFT
6091 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6092 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
6093 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6094 reg, next_select);
6095
6096 break;
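      /* Editorial sketch (not part of the original diff): in SImode,
	 (lshiftrt (ashift X (const_int 24)) (const_int 24)) zero-extends
	 the low byte of X, so bits 7..0 of the result equal bits 7..0 of
	 X.  If MASK fits in those bits, say MASK == 0xff, the shift pair
	 is dead and X itself can be used.  */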
6097
6098 case ROTATE:
6099 case ROTATERT:
6100 /* If the shift count is constant and we can do computations
6101 in the mode of X, compute where the bits we care about are.
6102 Otherwise, we can't do anything. Don't change the mode of
6103 the shift or propagate MODE into the shift, though. */
6104 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6105 && INTVAL (XEXP (x, 1)) >= 0)
6106 {
6107 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6108 GET_MODE (x), GEN_INT (mask),
6109 XEXP (x, 1));
6110	  if (temp && GET_CODE (temp) == CONST_INT
6111 SUBST (XEXP (x, 0),
6112 force_to_mode (XEXP (x, 0), GET_MODE (x),
6113 INTVAL (temp), reg, next_select));
6114 }
6115 break;
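      /* Editorial sketch (not part of the original diff): rotating MASK
	 the opposite way finds the operand bits that matter.  In SImode,
	 for (rotate X (const_int 8)) under MASK == 0xff, bits 7..0 of
	 the result come from bits 31..24 of X, and correspondingly
	 (rotatert 0xff 8) == 0xff000000.  */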
6116
6117 case NEG:
6118 /* If we just want the low-order bit, the NEG isn't needed since it
6119 won't change the low-order bit. */
6120 if (mask == 1)
6121 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
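      /* Editorial sketch (not part of the original diff): in two's
	 complement, (neg X) == (plus (not X) (const_int 1)), and adding 1
	 to ~X flips bit 0 back to its value in X, so the low-order bit
	 of -X always equals the low-order bit of X.  */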
6122
6123 /* We need any bits less significant than the most significant bit in
6124 MASK since carries from those bits will affect the bits we are
6125 interested in. */
6126 mask = fuller_mask;
6127 goto unop;
6128
6129 case NOT:
6130 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6131 same as the XOR case above. Ensure that the constant we form is not
6132 wider than the mode of X. */
6133
6134 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6135 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6136 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6137 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6138 < GET_MODE_BITSIZE (GET_MODE (x)))
6139 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6140 {
6141 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6142 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6143 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6144
6145 return force_to_mode (x, mode, mask, reg, next_select);
6146 }
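      /* Editorial sketch (not part of the original diff): take SImode and
	 MASK == 1.  Then (not (lshiftrt X (const_int 31))) agrees on bit 0
	 with (lshiftrt (xor X (const_int 0x80000000)) (const_int 31)):
	 complementing bit 31 and then shifting it down is the same as
	 shifting it down and then complementing it.  */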
6147
6678 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6679 use the full mask inside the NOT. */
6680 mask = fuller_mask;
6681
6148 unop:
6149 op0 = gen_lowpart_for_combine (op_mode,
6150 force_to_mode (XEXP (x, 0), mode, mask,
6151 reg, next_select));
6152 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6153 x = gen_unary (code, op_mode, op_mode, op0);
6154 break;
6155
6156 case NE:
6157 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6158 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6159 in CONST. */
6160 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 0) == const0_rtx
6161 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
6692 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
6693 which is equal to STORE_FLAG_VALUE. */
6694 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6695 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
6696 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
6162 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6163
6164 break;
6165
6166 case IF_THEN_ELSE:
6167 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6168 written in a narrower mode. We play it safe and do not do so. */
6169
6170 SUBST (XEXP (x, 1),
6171 gen_lowpart_for_combine (GET_MODE (x),
6172 force_to_mode (XEXP (x, 1), mode,
6173 mask, reg, next_select)));
6174 SUBST (XEXP (x, 2),
6175 gen_lowpart_for_combine (GET_MODE (x),
6176 force_to_mode (XEXP (x, 2), mode,
6177					      mask, reg, next_select)));
6178 break;
6714
6715 default:
6716 break;
6179 }
6180
6181 /* Ensure we return a value of the proper mode. */
6182 return gen_lowpart_for_combine (mode, x);
6183}
6184
6185/* Return nonzero if X is an expression that has one of two values depending on
6186 whether some other value is zero or nonzero. In that case, we return the
6187 value that is being tested, *PTRUE is set to the value if the rtx being
6188 returned has a nonzero value, and *PFALSE is set to the other alternative.
6189
6190 If we return zero, we set *PTRUE and *PFALSE to X. */
6191
6192static rtx
6193if_then_else_cond (x, ptrue, pfalse)
6194 rtx x;
6195 rtx *ptrue, *pfalse;
6196{
6197 enum machine_mode mode = GET_MODE (x);
6198 enum rtx_code code = GET_CODE (x);
6199 int size = GET_MODE_BITSIZE (mode);
6200 rtx cond0, cond1, true0, true1, false0, false1;
6201 unsigned HOST_WIDE_INT nz;
6202
6203 /* If this is a unary operation whose operand has one of two values, apply
6204 our opcode to compute those values. */
6205 if (GET_RTX_CLASS (code) == '1'
6206 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6207 {
6208 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6209 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6210 return cond0;
6211 }
6212
6213 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6214 make can't possibly match and would suppress other optimizations. */
6215 else if (code == COMPARE)
6216 ;
6217
6218 /* If this is a binary operation, see if either side has only one of two
6219 values. If either one does or if both do and they are conditional on
6220 the same value, compute the new true and false values. */
6221 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6222 || GET_RTX_CLASS (code) == '<')
6223 {
6224 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6225 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6226
6227 if ((cond0 != 0 || cond1 != 0)
6228 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6229 {
6768 /* If if_then_else_cond returned zero, then true/false are the
6769 same rtl. We must copy one of them to prevent invalid rtl
6770 sharing. */
6771 if (cond0 == 0)
6772 true0 = copy_rtx (true0);
6773 else if (cond1 == 0)
6774 true1 = copy_rtx (true1);
6775
6230 *ptrue = gen_binary (code, mode, true0, true1);
6231 *pfalse = gen_binary (code, mode, false0, false1);
6232 return cond0 ? cond0 : cond1;
6233 }
6234
6235#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6236
6237	  /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6238	     operands is zero when the other is non-zero, and vice-versa. */
6239
6240	  if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6781	  /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6782	     operands is zero when the other is non-zero, and vice-versa,
6783	     and STORE_FLAG_VALUE is 1 or -1. */
6784
6785	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6786	      && (code == PLUS || code == IOR || code == XOR || code == MINUS
6241 || code == UMAX)
6242 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6243 {
6244 rtx op0 = XEXP (XEXP (x, 0), 1);
6245 rtx op1 = XEXP (XEXP (x, 1), 1);
6246
6247 cond0 = XEXP (XEXP (x, 0), 0);
6248 cond1 = XEXP (XEXP (x, 1), 0);
6249
6250 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6251 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6252 && reversible_comparison_p (cond1)
6253 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6254 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6255 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6256 || ((swap_condition (GET_CODE (cond0))
6257 == reverse_condition (GET_CODE (cond1)))
6258 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6259 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6260 && ! side_effects_p (x))
6261 {
6262 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6263 *pfalse = gen_binary (MULT, mode,
6264 (code == MINUS
6265 ? gen_unary (NEG, mode, mode, op1) : op1),
6266 const_true_rtx);
6267 return cond0;
6268 }
6269 }
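	  /* Editorial sketch (not part of the original diff): this is how
	     a conditional such as (A != 0 ? B : C) looks after store-flag
	     expansion when STORE_FLAG_VALUE == 1:

		(plus (mult (ne A (const_int 0)) B)
		      (mult (eq A (const_int 0)) C))

	     The comparisons are reverses of one another with identical
	     operands, so (ne A (const_int 0)) is returned as the
	     condition, with *PTRUE == B and *PFALSE == C (each multiplied
	     by const_true_rtx).  */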
6270
6271	  /* Similarly for MULT, AND and UMIN, except that for these the result
6272 is always zero. */
6273 if ((code == MULT || code == AND || code == UMIN)
6819 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6820 && (code == MULT || code == AND || code == UMIN)
6274 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6275 {
6276 cond0 = XEXP (XEXP (x, 0), 0);
6277 cond1 = XEXP (XEXP (x, 1), 0);
6278
6279 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6280 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6281 && reversible_comparison_p (cond1)
6282 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6283 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6284 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6285 || ((swap_condition (GET_CODE (cond0))
6286 == reverse_condition (GET_CODE (cond1)))
6287 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6288 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6289 && ! side_effects_p (x))
6290 {
6291 *ptrue = *pfalse = const0_rtx;
6292 return cond0;
6293 }
6294 }
6295#endif
6296 }
6297
6298 else if (code == IF_THEN_ELSE)
6299 {
6300 /* If we have IF_THEN_ELSE already, extract the condition and
6301 canonicalize it if it is NE or EQ. */
6302 cond0 = XEXP (x, 0);
6303 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6304 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6305 return XEXP (cond0, 0);
6306 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6307 {
6308 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6309 return XEXP (cond0, 0);
6310 }
6311 else
6312 return cond0;
6313 }
6314
6315 /* If X is a normal SUBREG with both inner and outer modes integral,
6316 we can narrow both the true and false values of the inner expression,
6317 if there is a condition. */
6318 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6319 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6320 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6321 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6322 &true0, &false0)))
6323 {
6324 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6325 *pfalse
6326 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6327
6328 return cond0;
6329 }
6330
6331  /* If X is a constant, this isn't special and will cause confusion
6332 if we treat it as such. Likewise if it is equivalent to a constant. */
6333 else if (CONSTANT_P (x)
6334 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6335 ;
6336
6337 /* If X is known to be either 0 or -1, those are the true and
6338 false values when testing X. */
6339 else if (num_sign_bit_copies (x, mode) == size)
6340 {
6341 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6342 return x;
6343 }
6344
6345 /* Likewise for 0 or a single bit. */
6346 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6347 {
6348 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6349 return x;
6350 }
6351
6352 /* Otherwise fail; show no condition with true and false values the same. */
6353 *ptrue = *pfalse = x;
6354 return 0;
6355}
6356
6357/* Return the value of expression X given the fact that condition COND
6358 is known to be true when applied to REG as its first operand and VAL
6359 as its second. X is known to not be shared and so can be modified in
6360 place.
6361
6362 We only handle the simplest cases, and specifically those cases that
6363 arise with IF_THEN_ELSE expressions. */
6364
6365static rtx
6366known_cond (x, cond, reg, val)
6367 rtx x;
6368 enum rtx_code cond;
6369 rtx reg, val;
6370{
6371 enum rtx_code code = GET_CODE (x);
6372 rtx temp;
6373 char *fmt;
6374 int i, j;
6375
6376 if (side_effects_p (x))
6377 return x;
6378
6379 if (cond == EQ && rtx_equal_p (x, reg))
6380 return val;
6381
6382 /* If X is (abs REG) and we know something about REG's relationship
6383 with zero, we may be able to simplify this. */
6384
6385 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6386 switch (cond)
6387 {
6388 case GE: case GT: case EQ:
6389 return XEXP (x, 0);
6390 case LT: case LE:
6391 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6392 XEXP (x, 0));
6939 default:
6940 break;
6393 }
6394
6395 /* The only other cases we handle are MIN, MAX, and comparisons if the
6396 operands are the same as REG and VAL. */
6397
6398 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6399 {
6400 if (rtx_equal_p (XEXP (x, 0), val))
6401 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6402
6403 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6404 {
6405 if (GET_RTX_CLASS (code) == '<')
6406 return (comparison_dominates_p (cond, code) ? const_true_rtx
6407 : (comparison_dominates_p (cond,
6408 reverse_condition (code))
6409 ? const0_rtx : x));
6410
6411 else if (code == SMAX || code == SMIN
6412 || code == UMIN || code == UMAX)
6413 {
6414 int unsignedp = (code == UMIN || code == UMAX);
6415
6416 if (code == SMAX || code == UMAX)
6417 cond = reverse_condition (cond);
6418
6419 switch (cond)
6420 {
6421 case GE: case GT:
6422 return unsignedp ? x : XEXP (x, 1);
6423 case LE: case LT:
6424 return unsignedp ? x : XEXP (x, 0);
6425 case GEU: case GTU:
6426 return unsignedp ? XEXP (x, 1) : x;
6427 case LEU: case LTU:
6428 return unsignedp ? XEXP (x, 0) : x;
6977 default:
6978 break;
6429 }
6430 }
6431 }
6432 }
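      /* Editorial sketch (not part of the original diff): two instances.
	 Knowing COND == GT on (A, B), the expression (ge A B) folds to
	 const_true_rtx because GT dominates GE.  Knowing COND == GE on
	 (A, B), the expression (smax A B) folds to A: SMAX reverses the
	 condition to LT, and the LT arm of the switch above returns the
	 first operand.  */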
6433
6434 fmt = GET_RTX_FORMAT (code);
6435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6436 {
6437 if (fmt[i] == 'e')
6438 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6439 else if (fmt[i] == 'E')
6440 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6441 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6442 cond, reg, val));
6443 }
6444
6445 return x;
6446}
6447
6998/* See if X and Y are equal for the purposes of seeing if we can rewrite an
6999 assignment as a field assignment. */
7000
7001static int
7002rtx_equal_for_field_assignment_p (x, y)
7003 rtx x;
7004 rtx y;
7005{
7006 if (x == y || rtx_equal_p (x, y))
7007 return 1;
7008
7009 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7010 return 0;
7011
7012 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7013 Note that all SUBREGs of MEM are paradoxical; otherwise they
7014 would have been rewritten. */
7015 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7016 && GET_CODE (SUBREG_REG (y)) == MEM
7017 && rtx_equal_p (SUBREG_REG (y),
7018 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7019 return 1;
7020
7021 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7022 && GET_CODE (SUBREG_REG (x)) == MEM
7023 && rtx_equal_p (SUBREG_REG (x),
7024 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7025 return 1;
7026
7027  /* We used to see if get_last_value of X and Y were the same but that's
7028     not correct. In one direction, we'll cause the assignment to have
7029     the wrong destination and in the other, we'll import a register into
7030     this insn that might already have been dead. So fail if none of the
7031     above cases are true. */
7032 return 0;
7033}
7034
6448/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6449 Return that assignment if so.
6450
6451 We only handle the most common cases. */
6452
6453static rtx
6454make_field_assignment (x)
6455 rtx x;
6456{
6457 rtx dest = SET_DEST (x);
6458 rtx src = SET_SRC (x);
6459 rtx assign;
7047 rtx rhs, lhs;
6460 HOST_WIDE_INT c1;
6461 int pos, len;
6462 rtx other;
6463 enum machine_mode mode;
6464
6465 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6466 a clear of a one-bit field. We will have changed it to
6467 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6468 for a SUBREG. */
6469
6470 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6471 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6472 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6473 && (rtx_equal_p (dest, XEXP (src, 1))
6474 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6475 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
7061 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6476 {
6477 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6478 1, 1, 1, 0);
6479 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
7065 if (assign != 0)
7066 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7067 return x;
6480 }
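  /* Editorial sketch (not part of the original diff): the pattern above
     is a single-bit clear.  (const_int -2) is ...11110, so rotating it
     left by POS gives all ones except a zero at bit POS, and ANDing that
     into DEST clears exactly that bit.  The rewritten form is, roughly,
     (set (zero_extract DEST (const_int 1) POS) (const_int 0)).  */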
6481
6482 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6483 && subreg_lowpart_p (XEXP (src, 0))
6484 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6485 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6486 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6487 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6488 && (rtx_equal_p (dest, XEXP (src, 1))
6489 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6490 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
7076 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6491 {
6492 assign = make_extraction (VOIDmode, dest, 0,
6493 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6494 1, 1, 1, 0);
6495 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
7081 if (assign != 0)
7082 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7083 return x;
6496 }
6497
6498 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
7086 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6499 one-bit field. */
6500 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6501 && XEXP (XEXP (src, 0), 0) == const1_rtx
6502 && (rtx_equal_p (dest, XEXP (src, 1))
6503 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6504 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
7090 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
6505 {
6506 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6507 1, 1, 1, 0);
6508 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
7094 if (assign != 0)
7095 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7096 return x;
6509 }
6510
6511 /* The other case we handle is assignments into a constant-position
6512 field. They look like (ior (and DEST C1) OTHER). If C1 represents
7100 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
6513 a mask that has all one bits except for a group of zero bits and
6514 OTHER is known to have zeros where C1 has ones, this is such an
6515 assignment. Compute the position and length from C1. Shift OTHER
6516 to the appropriate position, force it to the required mode, and
6517 make the extraction. Check for the AND in both operands. */
6518
6519 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6520 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6521 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6522 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6523 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6524 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6525 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6526 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6527 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6528 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6529 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6530 dest)))
6531 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
7107 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7108 return x;
7109
7110 rhs = expand_compound_operation (XEXP (src, 0));
7111 lhs = expand_compound_operation (XEXP (src, 1));
7112
7113 if (GET_CODE (rhs) == AND
7114 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7115 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7116 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7117 else if (GET_CODE (lhs) == AND
7118 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7119 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7120 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
6532 else
6533 return x;
6534
6535 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
7124 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
6536 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6537 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6538 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
7126 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7127 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
6539 return x;
6540
6541 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7131 if (assign == 0)
7132 return x;
6542
6543 /* The mode to use for the source is the mode of the assignment, or of
6544 what is inside a possible STRICT_LOW_PART. */
6545 mode = (GET_CODE (assign) == STRICT_LOW_PART
6546 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6547
6548 /* Shift OTHER right POS places and make it the source, restricting it
6549 to the proper length and mode. */
6550
6551 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6552 GET_MODE (src), other, pos),
6553 mode,
6554 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6555 ? GET_MODE_MASK (mode)
6556 : ((HOST_WIDE_INT) 1 << len) - 1,
6557 dest, 0);
6558
6559 return gen_rtx_combine (SET, VOIDmode, assign, src);
6560}
6561
6562/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6563 if so. */
6564
6565static rtx
6566apply_distributive_law (x)
6567 rtx x;
6568{
6569 enum rtx_code code = GET_CODE (x);
6570 rtx lhs, rhs, other;
6571 rtx tem;
6572 enum rtx_code inner_code;
6573
6574 /* Distributivity is not true for floating point.
6575 It can change the value. So don't do it.
6576 -- rms and moshier@world.std.com. */
6577 if (FLOAT_MODE_P (GET_MODE (x)))
6578 return x;
6579
6580 /* The outer operation can only be one of the following: */
6581 if (code != IOR && code != AND && code != XOR
6582 && code != PLUS && code != MINUS)
6583 return x;
6584
6585 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6586
6587 /* If either operand is a primitive we can't do anything, so get out fast. */
7178 /* If either operand is a primitive we can't do anything, so get out
7179 fast. */
6588 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6589 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6590 return x;
6591
6592 lhs = expand_compound_operation (lhs);
6593 rhs = expand_compound_operation (rhs);
6594 inner_code = GET_CODE (lhs);
6595 if (inner_code != GET_CODE (rhs))
6596 return x;
6597
6598 /* See if the inner and outer operations distribute. */
6599 switch (inner_code)
6600 {
6601 case LSHIFTRT:
6602 case ASHIFTRT:
6603 case AND:
6604 case IOR:
6605 /* These all distribute except over PLUS. */
6606 if (code == PLUS || code == MINUS)
6607 return x;
6608 break;
6609
6610 case MULT:
6611 if (code != PLUS && code != MINUS)
6612 return x;
6613 break;
6614
6615 case ASHIFT:
6616 /* This is also a multiply, so it distributes over everything. */
6617 break;
6618
6619 case SUBREG:
6620	/* Non-paradoxical SUBREGs distribute over all operations, provided
6621 the inner modes and word numbers are the same, this is an extraction
6622 of a low-order part, we don't convert an fp operation to int or
6623 vice versa, and we would not be converting a single-word
6624 operation into a multi-word operation. The latter test is not
6625 required, but it prevents generating unneeded multi-word operations.
6626 Some of the previous tests are redundant given the latter test, but
6627 are retained because they are required for correctness.
6628
6629 We produce the result slightly differently in this case. */
6630
6631 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6632 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6633 || ! subreg_lowpart_p (lhs)
6634 || (GET_MODE_CLASS (GET_MODE (lhs))
6635 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6636 || (GET_MODE_SIZE (GET_MODE (lhs))
6637 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6638 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6639 return x;
6640
6641 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6642 SUBREG_REG (lhs), SUBREG_REG (rhs));
6643 return gen_lowpart_for_combine (GET_MODE (x), tem);
6644
6645 default:
6646 return x;
6647 }
6648
6649 /* Set LHS and RHS to the inner operands (A and B in the example
6650 above) and set OTHER to the common operand (C in the example).
6651 These is only one way to do this unless the inner operation is
6652 commutative. */
6653 if (GET_RTX_CLASS (inner_code) == 'c'
6654 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6655 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6656 else if (GET_RTX_CLASS (inner_code) == 'c'
6657 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6658 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6659 else if (GET_RTX_CLASS (inner_code) == 'c'
6660 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6661 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6662 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6663 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6664 else
6665 return x;
6666
6667 /* Form the new inner operation, seeing if it simplifies first. */
6668 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6669
6670 /* There is one exception to the general way of distributing:
6671     (a | b) ^ (a | c) -> (~a) & (b ^ c) */
6672 if (code == XOR && inner_code == IOR)
6673 {
6674 inner_code = AND;
6675 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
6676 }
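  /* Editorial check (not part of the original diff) of the identity with
     4-bit values a == 0b0001, b == 0b0010, c == 0b0100:
     (a | b) ^ (a | c) == 0b0011 ^ 0b0101 == 0b0110, and
     (~a) & (b ^ c) == 0b1110 & 0b0110 == 0b0110.  Wherever a has a one
     bit the two IORs agree and the XOR yields zero, matching the ~a
     mask; elsewhere b ^ c passes through unchanged.  */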
6677
6678  /* We may be able to continue distributing the result, so call
6679 ourselves recursively on the inner operation before forming the
6680 outer operation, which we return. */
6681 return gen_binary (inner_code, GET_MODE (x),
6682 apply_distributive_law (tem), other);
6683}
6684
6685/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6686 in MODE.
6687
6688 Return an equivalent form, if different from X. Otherwise, return X. If
6689 X is zero, we are to always construct the equivalent form. */
6690
6691static rtx
6692simplify_and_const_int (x, mode, varop, constop)
6693 rtx x;
6694 enum machine_mode mode;
6695 rtx varop;
6696 unsigned HOST_WIDE_INT constop;
6697{
6698 unsigned HOST_WIDE_INT nonzero;
6699 int width = GET_MODE_BITSIZE (mode);
6700 int i;
6701
6702 /* Simplify VAROP knowing that we will be only looking at some of the
6703 bits in it. */
6704 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
6705
6706 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6707 CONST_INT, we are done. */
6708 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6709 return varop;
6710
6711 /* See what bits may be nonzero in VAROP. Unlike the general case of
6712 a call to nonzero_bits, here we don't care about bits outside
6713 MODE. */
6714
6715 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6716
6717 /* If this would be an entire word for the target, but is not for
6718 the host, then sign-extend on the host so that the number will look
6719 the same way on the host that it would on the target.
6720
6721     For example, when building a 32 bit sparc targeted compiler hosted
6722     on a 64 bit alpha, we want the 32 bit unsigned value -1 to be
6723     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
6724     The latter confuses the sparc backend. */
6725
6726 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
6727 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
6728 nonzero |= ((HOST_WIDE_INT) (-1) << width);
6729
6730 /* Turn off all bits in the constant that are known to already be zero.
6731 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6732 which is tested below. */
6733
6734 constop &= nonzero;
6735
6736 /* If we don't have any bits left, return zero. */
6737 if (constop == 0)
6738 return const0_rtx;
6739
6740 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6741     a power of two, we can replace this with an ASHIFT. */
6742 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6743 && (i = exact_log2 (constop)) >= 0)
6744 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
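  /* Editorial sketch (not part of the original diff): if X is known to
     be 0 or 1, then (neg X) is 0 or all ones, so
     (and (neg X) (const_int 4)) is 0 or 4, which is exactly
     (ashift X (const_int 2)); in general ANDing the negation with the
     single bit 1<<i equals shifting X left by i.  */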
6745
6746 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6747 or XOR, then try to apply the distributive law. This may eliminate
6748 operations if either branch can be simplified because of the AND.
6749 It may also make some cases more complex, but those cases probably
6750 won't match a pattern either with or without this. */
6751
6752 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6753 return
6754 gen_lowpart_for_combine
6755 (mode,
6756 apply_distributive_law
6757 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6758 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6759 XEXP (varop, 0), constop),
6760 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6761 XEXP (varop, 1), constop))));
6762
6763 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6764 if we already had one (just check for the simplest cases). */
6765 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6766 && GET_MODE (XEXP (x, 0)) == mode
6767 && SUBREG_REG (XEXP (x, 0)) == varop)
6768 varop = XEXP (x, 0);
6769 else
6770 varop = gen_lowpart_for_combine (mode, varop);
6771
7180 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7181 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7182 return x;
7183
7184 lhs = expand_compound_operation (lhs);
7185 rhs = expand_compound_operation (rhs);
7186 inner_code = GET_CODE (lhs);
7187 if (inner_code != GET_CODE (rhs))
7188 return x;
7189
7190 /* See if the inner and outer operations distribute. */
7191 switch (inner_code)
7192 {
7193 case LSHIFTRT:
7194 case ASHIFTRT:
7195 case AND:
7196 case IOR:
7197 /* These all distribute except over PLUS. */
7198 if (code == PLUS || code == MINUS)
7199 return x;
7200 break;
7201
7202 case MULT:
7203 if (code != PLUS && code != MINUS)
7204 return x;
7205 break;
7206
7207 case ASHIFT:
7208 /* This is also a multiply, so it distributes over everything. */
7209 break;
7210
7211 case SUBREG:
7212 /* Non-paradoxical SUBREGs distribute over all operations, provided
7213 the inner modes and word numbers are the same, this is an extraction
7214 of a low-order part, we don't convert an fp operation to int or
7215 vice versa, and we would not be converting a single-word
7216 operation into a multi-word operation. The latter test is not
7217 required, but it prevents generating unneeded multi-word operations.
7218 Some of the previous tests are subsumed by the latter test, but are
7219 retained anyway because, unlike it, they are required for correctness.
7220
7221 We produce the result slightly differently in this case. */
7222
7223 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7224 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7225 || ! subreg_lowpart_p (lhs)
7226 || (GET_MODE_CLASS (GET_MODE (lhs))
7227 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7228 || (GET_MODE_SIZE (GET_MODE (lhs))
7229 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7230 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7231 return x;
7232
7233 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7234 SUBREG_REG (lhs), SUBREG_REG (rhs));
7235 return gen_lowpart_for_combine (GET_MODE (x), tem);
7236
7237 default:
7238 return x;
7239 }
7240
7241 /* Set LHS and RHS to the inner operands (A and B in the example
7242 above) and set OTHER to the common operand (C in the example).
7243 There is only one way to do this unless the inner operation is
7244 commutative. */
7245 if (GET_RTX_CLASS (inner_code) == 'c'
7246 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7247 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7248 else if (GET_RTX_CLASS (inner_code) == 'c'
7249 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7250 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7251 else if (GET_RTX_CLASS (inner_code) == 'c'
7252 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7253 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7254 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7255 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7256 else
7257 return x;
7258
7259 /* Form the new inner operation, seeing if it simplifies first. */
7260 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7261
7262 /* There is one exception to the general way of distributing:
7263 (a | c) ^ (b | c) -> (a ^ b) & ~c */
7264 if (code == XOR && inner_code == IOR)
7265 {
7266 inner_code = AND;
7267 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7268 }
7269
7270 /* We may be able to continue distributing the result, so call
7271 ourselves recursively on the inner operation before forming the
7272 outer operation, which we return. */
7273 return gen_binary (inner_code, GET_MODE (x),
7274 apply_distributive_law (tem), other);
7275}
7276
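/* Illustrative sketch (editor's addition, not part of combine.c): a
   standalone, brute-force check of the distributive-law exception
   handled above, (a | c) ^ (b | c) == (a ^ b) & ~c, over all 8-bit
   operands.  The function name and the plain C ints are assumptions
   for illustration only.  */

#include <assert.h>

static void
check_distributive_exception (void)
{
  unsigned int a, b, c;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
	/* Mask to 8 bits so the complement of C stays in range.  */
	assert ((((a | c) ^ (b | c)) & 0xff) == ((a ^ b) & ~c & 0xff));
}
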
7277/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7278 in MODE.
7279
7280 Return an equivalent form, if different from X. Otherwise, return X. If
7281 X is zero, we are to always construct the equivalent form. */
7282
7283static rtx
7284simplify_and_const_int (x, mode, varop, constop)
7285 rtx x;
7286 enum machine_mode mode;
7287 rtx varop;
7288 unsigned HOST_WIDE_INT constop;
7289{
7290 unsigned HOST_WIDE_INT nonzero;
7291 int width = GET_MODE_BITSIZE (mode);
7292 int i;
7293
7294 /* Simplify VAROP knowing that we will be only looking at some of the
7295 bits in it. */
7296 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7297
7298 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7299 CONST_INT, we are done. */
7300 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7301 return varop;
7302
7303 /* See what bits may be nonzero in VAROP. Unlike the general case of
7304 a call to nonzero_bits, here we don't care about bits outside
7305 MODE. */
7306
7307 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7308
7309 /* If this would be an entire word for the target, but is not for
7310 the host, then sign-extend on the host so that the number will look
7311 the same way on the host that it would on the target.
7312
7313 For example, when building a 64 bit alpha hosted 32 bit sparc
7314 targeted compiler, we want the 32 bit unsigned value -1 to be
7315 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7316 The latter confuses the sparc backend. */
7317
7318 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7319 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7320 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7321
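  /* Editor's worked example (hypothetical configuration): with
     BITS_PER_WORD == 32 and HOST_BITS_PER_WIDE_INT == 64, a fully
     unknown SImode value has NONZERO == 0xffffffff.  Bit 31 is set,
     so the OR above extends it to 0xffffffffffffffff, the host-wide
     representation of -1, rather than 0x00000000ffffffff.  */
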
7322 /* Turn off all bits in the constant that are known to already be zero.
7323 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7324 which is tested below. */
7325
7326 constop &= nonzero;
7327
7328 /* If we don't have any bits left, return zero. */
7329 if (constop == 0)
7330 return const0_rtx;
7331
7332 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7333 a power of two, we can replace this with an ASHIFT. */
7334 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7335 && (i = exact_log2 (constop)) >= 0)
7336 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7337
7338 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7339 or XOR, then try to apply the distributive law. This may eliminate
7340 operations if either branch can be simplified because of the AND.
7341 It may also make some cases more complex, but those cases probably
7342 won't match a pattern either with or without this. */
7343
7344 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7345 return
7346 gen_lowpart_for_combine
7347 (mode,
7348 apply_distributive_law
7349 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7350 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7351 XEXP (varop, 0), constop),
7352 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7353 XEXP (varop, 1), constop))));
7354
7355 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7356 if we already had one (just check for the simplest cases). */
7357 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7358 && GET_MODE (XEXP (x, 0)) == mode
7359 && SUBREG_REG (XEXP (x, 0)) == varop)
7360 varop = XEXP (x, 0);
7361 else
7362 varop = gen_lowpart_for_combine (mode, varop);
7363
6772 /* If we can't make the SUBREG, try to return what we were given. */
7364 /* If we can't make the SUBREG, try to return what we were given. */
6773 if (GET_CODE (varop) == CLOBBER)
6774 return x ? x : varop;
6775
6776 /* If we are only masking insignificant bits, return VAROP. */
6777 if (constop == nonzero)
6778 x = varop;
6779
6780 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6781 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6782 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6783
6784 else
6785 {
6786 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6787 || INTVAL (XEXP (x, 1)) != constop)
6788 SUBST (XEXP (x, 1), GEN_INT (constop));
6789
6790 SUBST (XEXP (x, 0), varop);
6791 }
6792
6793 return x;
6794}
6795
7365 if (GET_CODE (varop) == CLOBBER)
7366 return x ? x : varop;
7367
7368 /* If we are only masking insignificant bits, return VAROP. */
7369 if (constop == nonzero)
7370 x = varop;
7371
7372 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7373 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7374 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7375
7376 else
7377 {
7378 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7379 || INTVAL (XEXP (x, 1)) != constop)
7380 SUBST (XEXP (x, 1), GEN_INT (constop));
7381
7382 SUBST (XEXP (x, 0), varop);
7383 }
7384
7385 return x;
7386}
7387
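/* Illustrative sketch (editor's addition, hypothetical values): the
   core masking step of simplify_and_const_int above, with `unsigned
   long' standing in for unsigned HOST_WIDE_INT.  If VAROP is known
   to be (x & 0xff), its nonzero bits are 0xff, so AND-ing with
   0x0f0f reduces to AND-ing with 0x0f; AND-ing with 0xffff would be
   a no-op and the AND would be dropped entirely.  */

static unsigned long
fold_and_constant (unsigned long nonzero, unsigned long constop)
{
  constop &= nonzero;		/* e.g. 0x0f0f & 0xff == 0x0f */
  /* constop == 0 means the whole expression is zero;
     constop == nonzero means the AND is redundant.  */
  return constop;
}
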
7388/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7389 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7390 is less useful. We can't allow both, because that results in exponential
7391 run time recursion. There is a nullstone testcase that triggered
7392 this. This macro avoids accidental uses of num_sign_bit_copies. */
7393#define num_sign_bit_copies()
7394
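/* Editor's note by example (hypothetical use): because the macro
   above is function-like and takes no arguments, any accidental call
   inside nonzero_bits, such as

       n = num_sign_bit_copies (x, mode);

   fails at preprocessing time ("macro passed 2 arguments, but takes
   just 0"), which enforces the one-way recursion described above.  */
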
6796/* Given an expression, X, compute which bits in X can be non-zero.
6797 We don't care about bits outside of those defined in MODE.
6798
6799 For most X this is simply GET_MODE_MASK (MODE), but if X is
6800 a shift, AND, or zero_extract, we can do better. */
6801
6802static unsigned HOST_WIDE_INT
6803nonzero_bits (x, mode)
6804 rtx x;
6805 enum machine_mode mode;
6806{
6807 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6808 unsigned HOST_WIDE_INT inner_nz;
6809 enum rtx_code code;
6810 int mode_width = GET_MODE_BITSIZE (mode);
6811 rtx tem;
6812
6813 /* For floating-point values, assume all bits are needed. */
6814 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6815 return nonzero;
6816
6817 /* If X is wider than MODE, use its mode instead. */
6818 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6819 {
6820 mode = GET_MODE (x);
6821 nonzero = GET_MODE_MASK (mode);
6822 mode_width = GET_MODE_BITSIZE (mode);
6823 }
6824
6825 if (mode_width > HOST_BITS_PER_WIDE_INT)
6826 /* Our only callers in this case look for single bit values. So
6827 just return the mode mask. Those tests will then be false. */
6828 return nonzero;
6829
6830#ifndef WORD_REGISTER_OPERATIONS
6831 /* If MODE is wider than X, but both are a single word for both the host
6832 and target machines, we can compute this from which bits of the
6833 object might be nonzero in its own mode, taking into account the fact
6834 that on many CISC machines, accessing an object in a wider mode
6835 causes the high-order bits to become undefined. So they are
6836 not known to be zero. */
6837
6838 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6839 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6840 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6841 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6842 {
6843 nonzero &= nonzero_bits (x, GET_MODE (x));
6844 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6845 return nonzero;
6846 }
6847#endif
6848
6849 code = GET_CODE (x);
6850 switch (code)
6851 {
6852 case REG:
6853#ifdef POINTERS_EXTEND_UNSIGNED
6854 /* If pointers extend unsigned and this is a pointer in Pmode, say that
6855 all the bits above ptr_mode are known to be zero. */
6856 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
6857 && REGNO_POINTER_FLAG (REGNO (x)))
6858 nonzero &= GET_MODE_MASK (ptr_mode);
6859#endif
6860
6861#ifdef STACK_BOUNDARY
6862 /* If this is the stack pointer, we may know something about its
6863 alignment. If PUSH_ROUNDING is defined, it is possible for the
6864 stack to be momentarily aligned only to that amount, so we pick
6865 the least alignment. */
6866
7395/* Given an expression, X, compute which bits in X can be non-zero.
7396 We don't care about bits outside of those defined in MODE.
7397
7398 For most X this is simply GET_MODE_MASK (MODE), but if X is
7399 a shift, AND, or zero_extract, we can do better. */
7400
7401static unsigned HOST_WIDE_INT
7402nonzero_bits (x, mode)
7403 rtx x;
7404 enum machine_mode mode;
7405{
7406 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7407 unsigned HOST_WIDE_INT inner_nz;
7408 enum rtx_code code;
7409 int mode_width = GET_MODE_BITSIZE (mode);
7410 rtx tem;
7411
7412 /* For floating-point values, assume all bits are needed. */
7413 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7414 return nonzero;
7415
7416 /* If X is wider than MODE, use its mode instead. */
7417 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7418 {
7419 mode = GET_MODE (x);
7420 nonzero = GET_MODE_MASK (mode);
7421 mode_width = GET_MODE_BITSIZE (mode);
7422 }
7423
7424 if (mode_width > HOST_BITS_PER_WIDE_INT)
7425 /* Our only callers in this case look for single bit values. So
7426 just return the mode mask. Those tests will then be false. */
7427 return nonzero;
7428
7429#ifndef WORD_REGISTER_OPERATIONS
7430 /* If MODE is wider than X, but both are a single word for both the host
7431 and target machines, we can compute this from which bits of the
7432 object might be nonzero in its own mode, taking into account the fact
7433 that on many CISC machines, accessing an object in a wider mode
7434 causes the high-order bits to become undefined. So they are
7435 not known to be zero. */
7436
7437 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7438 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7439 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7440 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7441 {
7442 nonzero &= nonzero_bits (x, GET_MODE (x));
7443 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7444 return nonzero;
7445 }
7446#endif
7447
7448 code = GET_CODE (x);
7449 switch (code)
7450 {
7451 case REG:
7452#ifdef POINTERS_EXTEND_UNSIGNED
7453 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7454 all the bits above ptr_mode are known to be zero. */
7455 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7456 && REGNO_POINTER_FLAG (REGNO (x)))
7457 nonzero &= GET_MODE_MASK (ptr_mode);
7458#endif
7459
7460#ifdef STACK_BOUNDARY
7461 /* If this is the stack pointer, we may know something about its
7462 alignment. If PUSH_ROUNDING is defined, it is possible for the
7463 stack to be momentarily aligned only to that amount, so we pick
7464 the least alignment. */
7465
6867 if (x == stack_pointer_rtx)
7466 /* We can't check for arg_pointer_rtx here, because it is not
7467 guaranteed to have as much alignment as the stack pointer.
7468 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7469 alignment but the argument pointer has only 64 bit alignment. */
7470
7471 if ((x == frame_pointer_rtx
7472 || x == stack_pointer_rtx
7473 || x == hard_frame_pointer_rtx
7474 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7475 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7476#ifdef STACK_BIAS
7477 && !STACK_BIAS
7478#endif
7479 )
6868 {
6869 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6870
6871#ifdef PUSH_ROUNDING
7480 {
7481 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7482
7483#ifdef PUSH_ROUNDING
6872 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7484 if (REGNO (x) == STACK_POINTER_REGNUM)
7485 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6873#endif
6874
6875 /* We must return here, otherwise we may get a worse result from
6876 one of the choices below. There is nothing useful below as
6877 far as the stack pointer is concerned. */
6878 return nonzero &= ~ (sp_alignment - 1);
6879 }
6880#endif
6881
6882 /* If X is a register whose nonzero bits value is current, use it.
6883 Otherwise, if X is a register whose value we can find, use that
6884 value. Otherwise, use the previously-computed global nonzero bits
6885 for this register. */
6886
6887 if (reg_last_set_value[REGNO (x)] != 0
6888 && reg_last_set_mode[REGNO (x)] == mode
7486#endif
7487
7488 /* We must return here, otherwise we may get a worse result from
7489 one of the choices below. There is nothing useful below as
7490 far as the stack pointer is concerned. */
7491 return nonzero &= ~ (sp_alignment - 1);
7492 }
7493#endif
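      /* Editor's worked example (hypothetical configuration): with
	 STACK_BOUNDARY == 64, sp_alignment == 64 / BITS_PER_UNIT == 8,
	 so the mask ~(sp_alignment - 1) == ~7 clears the three low
	 bits: an 8-byte-aligned pointer is known to end in 000.  */
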
7494
7495 /* If X is a register whose nonzero bits value is current, use it.
7496 Otherwise, if X is a register whose value we can find, use that
7497 value. Otherwise, use the previously-computed global nonzero bits
7498 for this register. */
7499
7500 if (reg_last_set_value[REGNO (x)] != 0
7501 && reg_last_set_mode[REGNO (x)] == mode
6889 && (reg_n_sets[REGNO (x)] == 1
7502 && (REG_N_SETS (REGNO (x)) == 1
6890 || reg_last_set_label[REGNO (x)] == label_tick)
6891 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6892 return reg_last_set_nonzero_bits[REGNO (x)];
6893
6894 tem = get_last_value (x);
6895
6896 if (tem)
6897 {
6898#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6899 /* If X is narrower than MODE and TEM is a non-negative
6900 constant that would appear negative in the mode of X,
6901 sign-extend it for use in reg_nonzero_bits because some
6902 machines (maybe most) will actually do the sign-extension
6903 and this is the conservative approach.
6904
6905 ??? For 2.5, try to tighten up the MD files in this regard
6906 instead of this kludge. */
6907
6908 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6909 && GET_CODE (tem) == CONST_INT
6910 && INTVAL (tem) > 0
6911 && 0 != (INTVAL (tem)
6912 & ((HOST_WIDE_INT) 1
6913 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6914 tem = GEN_INT (INTVAL (tem)
6915 | ((HOST_WIDE_INT) (-1)
6916 << GET_MODE_BITSIZE (GET_MODE (x))));
6917#endif
6918 return nonzero_bits (tem, mode);
6919 }
6920 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6921 return reg_nonzero_bits[REGNO (x)] & nonzero;
6922 else
6923 return nonzero;
6924
6925 case CONST_INT:
6926#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6927 /* If X is negative in MODE, sign-extend the value. */
6928 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6929 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6930 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
6931#endif
6932
6933 return INTVAL (x);
6934
6935 case MEM:
6936#ifdef LOAD_EXTEND_OP
6937 /* On many, if not most, RISC machines, reading a byte from memory
6938 zeros the rest of the register. Noticing that fact saves a lot
6939 of extra zero-extends. */
6940 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6941 nonzero &= GET_MODE_MASK (GET_MODE (x));
6942#endif
6943 break;
6944
6945 case EQ: case NE:
6946 case GT: case GTU:
6947 case LT: case LTU:
6948 case GE: case GEU:
6949 case LE: case LEU:
6950
6951 /* If this produces an integer result, we know which bits are set.
6952 Code here used to clear bits outside the mode of X, but that is
6953 now done above. */
6954
6955 if (GET_MODE_CLASS (mode) == MODE_INT
6956 && mode_width <= HOST_BITS_PER_WIDE_INT)
6957 nonzero = STORE_FLAG_VALUE;
6958 break;
6959
6960 case NEG:
7503 || reg_last_set_label[REGNO (x)] == label_tick)
7504 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7505 return reg_last_set_nonzero_bits[REGNO (x)];
7506
7507 tem = get_last_value (x);
7508
7509 if (tem)
7510 {
7511#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7512 /* If X is narrower than MODE and TEM is a non-negative
7513 constant that would appear negative in the mode of X,
7514 sign-extend it for use in reg_nonzero_bits because some
7515 machines (maybe most) will actually do the sign-extension
7516 and this is the conservative approach.
7517
7518 ??? For 2.5, try to tighten up the MD files in this regard
7519 instead of this kludge. */
7520
7521 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7522 && GET_CODE (tem) == CONST_INT
7523 && INTVAL (tem) > 0
7524 && 0 != (INTVAL (tem)
7525 & ((HOST_WIDE_INT) 1
7526 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7527 tem = GEN_INT (INTVAL (tem)
7528 | ((HOST_WIDE_INT) (-1)
7529 << GET_MODE_BITSIZE (GET_MODE (x))));
7530#endif
7531 return nonzero_bits (tem, mode);
7532 }
7533 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7534 return reg_nonzero_bits[REGNO (x)] & nonzero;
7535 else
7536 return nonzero;
7537
7538 case CONST_INT:
7539#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7540 /* If X is negative in MODE, sign-extend the value. */
7541 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7542 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7543 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7544#endif
7545
7546 return INTVAL (x);
7547
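      /* Editor's worked example (hypothetical case): x == 0x80 with
	 mode_width == 8.  The host value is positive, but bit 7 -- the
	 sign bit in MODE -- is set, so the code above returns
	 0x80 | ((HOST_WIDE_INT) -1 << 8), i.e. the sign-extended
	 host value ...ffffff80.  */
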
7548 case MEM:
7549#ifdef LOAD_EXTEND_OP
7550 /* On many, if not most, RISC machines, reading a byte from memory
7551 zeros the rest of the register. Noticing that fact saves a lot
7552 of extra zero-extends. */
7553 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7554 nonzero &= GET_MODE_MASK (GET_MODE (x));
7555#endif
7556 break;
7557
7558 case EQ: case NE:
7559 case GT: case GTU:
7560 case LT: case LTU:
7561 case GE: case GEU:
7562 case LE: case LEU:
7563
7564 /* If this produces an integer result, we know which bits are set.
7565 Code here used to clear bits outside the mode of X, but that is
7566 now done above. */
7567
7568 if (GET_MODE_CLASS (mode) == MODE_INT
7569 && mode_width <= HOST_BITS_PER_WIDE_INT)
7570 nonzero = STORE_FLAG_VALUE;
7571 break;
7572
7573 case NEG:
7574#if 0
7575 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7576 and num_sign_bit_copies. */
6961 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6962 == GET_MODE_BITSIZE (GET_MODE (x)))
6963 nonzero = 1;
7577 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7578 == GET_MODE_BITSIZE (GET_MODE (x)))
7579 nonzero = 1;
7580#endif
6964
6965 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6966 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6967 break;
6968
6969 case ABS:
7581
7582 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
7583 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
7584 break;
7585
7586 case ABS:
7587#if 0
7588 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7589 and num_sign_bit_copies. */
6970 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6971 == GET_MODE_BITSIZE (GET_MODE (x)))
6972 nonzero = 1;
7590 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7591 == GET_MODE_BITSIZE (GET_MODE (x)))
7592 nonzero = 1;
7593#endif
6973 break;
6974
6975 case TRUNCATE:
6976 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6977 break;
6978
6979 case ZERO_EXTEND:
6980 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6981 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6982 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6983 break;
6984
6985 case SIGN_EXTEND:
6986 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6987 Otherwise, show all the bits in the outer mode but not the inner
6988 may be non-zero. */
6989 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6990 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6991 {
6992 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7594 break;
7595
7596 case TRUNCATE:
7597 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7598 break;
7599
7600 case ZERO_EXTEND:
7601 nonzero &= nonzero_bits (XEXP (x, 0), mode);
7602 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7603 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7604 break;
7605
7606 case SIGN_EXTEND:
7607 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7608 Otherwise, show all the bits in the outer mode but not the inner
7609 may be non-zero. */
7610 inner_nz = nonzero_bits (XEXP (x, 0), mode);
7611 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7612 {
7613 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6993 if (inner_nz &
6994 (((HOST_WIDE_INT) 1
6995 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
7614 if (inner_nz
7615 & (((HOST_WIDE_INT) 1
7616 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6996 inner_nz |= (GET_MODE_MASK (mode)
6997 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6998 }
6999
7000 nonzero &= inner_nz;
7001 break;
7002
7003 case AND:
7004 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7005 & nonzero_bits (XEXP (x, 1), mode));
7006 break;
7007
7008 case XOR: case IOR:
7009 case UMIN: case UMAX: case SMIN: case SMAX:
7010 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7011 | nonzero_bits (XEXP (x, 1), mode));
7012 break;
7013
7014 case PLUS: case MINUS:
7015 case MULT:
7016 case DIV: case UDIV:
7017 case MOD: case UMOD:
7018 /* We can apply the rules of arithmetic to compute the number of
7019 high- and low-order zero bits of these operations. We start by
7020 computing the width (position of the highest-order non-zero bit)
7021 and the number of low-order zero bits for each value. */
7022 {
7023 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7024 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7025 int width0 = floor_log2 (nz0) + 1;
7026 int width1 = floor_log2 (nz1) + 1;
7027 int low0 = floor_log2 (nz0 & -nz0);
7028 int low1 = floor_log2 (nz1 & -nz1);
7029 HOST_WIDE_INT op0_maybe_minusp
7030 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7031 HOST_WIDE_INT op1_maybe_minusp
7032 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7033 int result_width = mode_width;
7034 int result_low = 0;
7035
7036 switch (code)
7037 {
7038 case PLUS:
7617 inner_nz |= (GET_MODE_MASK (mode)
7618 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7619 }
7620
7621 nonzero &= inner_nz;
7622 break;
7623
7624 case AND:
7625 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7626 & nonzero_bits (XEXP (x, 1), mode));
7627 break;
7628
7629 case XOR: case IOR:
7630 case UMIN: case UMAX: case SMIN: case SMAX:
7631 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7632 | nonzero_bits (XEXP (x, 1), mode));
7633 break;
7634
7635 case PLUS: case MINUS:
7636 case MULT:
7637 case DIV: case UDIV:
7638 case MOD: case UMOD:
7639 /* We can apply the rules of arithmetic to compute the number of
7640 high- and low-order zero bits of these operations. We start by
7641 computing the width (position of the highest-order non-zero bit)
7642 and the number of low-order zero bits for each value. */
7643 {
7644 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7645 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7646 int width0 = floor_log2 (nz0) + 1;
7647 int width1 = floor_log2 (nz1) + 1;
7648 int low0 = floor_log2 (nz0 & -nz0);
7649 int low1 = floor_log2 (nz1 & -nz1);
7650 HOST_WIDE_INT op0_maybe_minusp
7651 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7652 HOST_WIDE_INT op1_maybe_minusp
7653 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7654 int result_width = mode_width;
7655 int result_low = 0;
7656
7657 switch (code)
7658 {
7659 case PLUS:
7660#ifdef STACK_BIAS
7661 if (STACK_BIAS
7662 && (XEXP (x, 0) == stack_pointer_rtx
7663 || XEXP (x, 0) == frame_pointer_rtx)
7664 && GET_CODE (XEXP (x, 1)) == CONST_INT)
7665 {
7666 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7667
7668 nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
7669 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
7670 width0 = floor_log2 (nz0) + 1;
7671 width1 = floor_log2 (nz1) + 1;
7672 low0 = floor_log2 (nz0 & -nz0);
7673 low1 = floor_log2 (nz1 & -nz1);
7674 }
7675#endif
7039 result_width = MAX (width0, width1) + 1;
7040 result_low = MIN (low0, low1);
7041 break;
7042 case MINUS:
7043 result_low = MIN (low0, low1);
7044 break;
7045 case MULT:
7046 result_width = width0 + width1;
7047 result_low = low0 + low1;
7048 break;
7049 case DIV:
7050 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7051 result_width = width0;
7052 break;
7053 case UDIV:
7054 result_width = width0;
7055 break;
7056 case MOD:
7057 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7058 result_width = MIN (width0, width1);
7059 result_low = MIN (low0, low1);
7060 break;
7061 case UMOD:
7062 result_width = MIN (width0, width1);
7063 result_low = MIN (low0, low1);
7064 break;
7676 result_width = MAX (width0, width1) + 1;
7677 result_low = MIN (low0, low1);
7678 break;
7679 case MINUS:
7680 result_low = MIN (low0, low1);
7681 break;
7682 case MULT:
7683 result_width = width0 + width1;
7684 result_low = low0 + low1;
7685 break;
7686 case DIV:
7687 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7688 result_width = width0;
7689 break;
7690 case UDIV:
7691 result_width = width0;
7692 break;
7693 case MOD:
7694 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7695 result_width = MIN (width0, width1);
7696 result_low = MIN (low0, low1);
7697 break;
7698 case UMOD:
7699 result_width = MIN (width0, width1);
7700 result_low = MIN (low0, low1);
7701 break;
7702 default:
7703 abort ();
7065 }
7066
7067 if (result_width < mode_width)
7068 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7069
7070 if (result_low > 0)
7071 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7072 }
7073 break;
7074
7075 case ZERO_EXTRACT:
7076 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7077 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7078 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7079 break;
7080
7081 case SUBREG:
7082 /* If this is a SUBREG formed for a promoted variable that has
7083 been zero-extended, we know that at least the high-order bits
7084 are zero, though others might be too. */
7085
7086 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7087 nonzero = (GET_MODE_MASK (GET_MODE (x))
7088 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7089
7090 /* If the inner mode is a single word for both the host and target
7091 machines, we can compute this from which bits of the inner
7092 object might be nonzero. */
7093 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7094 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7095 <= HOST_BITS_PER_WIDE_INT))
7096 {
7097 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7098
7704 }
7705
7706 if (result_width < mode_width)
7707 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7708
7709 if (result_low > 0)
7710 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7711 }
7712 break;
7713
7714 case ZERO_EXTRACT:
7715 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7716 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7717 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7718 break;
7719
7720 case SUBREG:
7721 /* If this is a SUBREG formed for a promoted variable that has
7722 been zero-extended, we know that at least the high-order bits
7723 are zero, though others might be too. */
7724
7725 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7726 nonzero = (GET_MODE_MASK (GET_MODE (x))
7727 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
7728
7729 /* If the inner mode is a single word for both the host and target
7730 machines, we can compute this from which bits of the inner
7731 object might be nonzero. */
7732 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7733 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7734 <= HOST_BITS_PER_WIDE_INT))
7735 {
7736 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7737
7099#ifndef WORD_REGISTER_OPERATIONS
7100 /* On many CISC machines, accessing an object in a wider mode
7101 causes the high-order bits to become undefined. So they are
7102 not known to be zero. */
7103 if (GET_MODE_SIZE (GET_MODE (x))
7104 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7105 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7106 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7738#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
7739 /* If this is a typical RISC machine, we only have to worry
7740 about the way loads are extended. */
7741 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
7742 ? (nonzero
7743 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
7744 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
7107#endif
7745#endif
7746 {
7747 /* On many CISC machines, accessing an object in a wider mode
7748 causes the high-order bits to become undefined. So they are
7749 not known to be zero. */
7750 if (GET_MODE_SIZE (GET_MODE (x))
7751 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7752 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7753 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7754 }
7108 }
7109 break;
7110
7111 case ASHIFTRT:
7112 case LSHIFTRT:
7113 case ASHIFT:
7114 case ROTATE:
7115 /* The nonzero bits are in two classes: any bits within MODE
7116 that aren't in GET_MODE (x) are always significant. The rest of the
7117 nonzero bits are those that are significant in the operand of
7118 the shift when shifted the appropriate number of bits. This
7119 shows that high-order bits are cleared by the right shift and
7120 low-order bits by left shifts. */
7121 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7122 && INTVAL (XEXP (x, 1)) >= 0
7123 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7124 {
7125 enum machine_mode inner_mode = GET_MODE (x);
7126 int width = GET_MODE_BITSIZE (inner_mode);
7127 int count = INTVAL (XEXP (x, 1));
7128 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7129 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7130 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7131 unsigned HOST_WIDE_INT outer = 0;
7132
7133 if (mode_width > width)
7134 outer = (op_nonzero & nonzero & ~ mode_mask);
7135
7136 if (code == LSHIFTRT)
7137 inner >>= count;
7138 else if (code == ASHIFTRT)
7139 {
7140 inner >>= count;
7141
7142 /* If the sign bit may have been nonzero before the shift, we
7143 need to mark all the places it could have been copied to
7144 by the shift as possibly nonzero. */
7145 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7146 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7147 }
7148 else if (code == ASHIFT)
7149 inner <<= count;
7150 else
7151 inner = ((inner << (count % width)
7152 | (inner >> (width - (count % width)))) & mode_mask);
7153
7154 nonzero &= (outer | inner);
7155 }
7156 break;
7157
7158 case FFS:
7159 /* This is at most the number of bits in the mode. */
7160 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7161 break;
7162
7163 case IF_THEN_ELSE:
7164 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7165 | nonzero_bits (XEXP (x, 2), mode));
7166 break;
7755 }
7756 break;
7757
7758 case ASHIFTRT:
7759 case LSHIFTRT:
7760 case ASHIFT:
7761 case ROTATE:
7762 /* The nonzero bits are in two classes: any bits within MODE
7763 that aren't in GET_MODE (x) are always significant. The rest of the
7764 nonzero bits are those that are significant in the operand of
7765 the shift when shifted the appropriate number of bits. This
7766 shows that high-order bits are cleared by the right shift and
7767 low-order bits by left shifts. */
7768 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7769 && INTVAL (XEXP (x, 1)) >= 0
7770 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7771 {
7772 enum machine_mode inner_mode = GET_MODE (x);
7773 int width = GET_MODE_BITSIZE (inner_mode);
7774 int count = INTVAL (XEXP (x, 1));
7775 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7776 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7777 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
7778 unsigned HOST_WIDE_INT outer = 0;
7779
7780 if (mode_width > width)
7781 outer = (op_nonzero & nonzero & ~ mode_mask);
7782
7783 if (code == LSHIFTRT)
7784 inner >>= count;
7785 else if (code == ASHIFTRT)
7786 {
7787 inner >>= count;
7788
7789 /* If the sign bit may have been nonzero before the shift, we
7790 need to mark all the places it could have been copied to
7791 by the shift as possibly nonzero. */
7792 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7793 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
7794 }
7795 else if (code == ASHIFT)
7796 inner <<= count;
7797 else
7798 inner = ((inner << (count % width)
7799 | (inner >> (width - (count % width)))) & mode_mask);
7800
7801 nonzero &= (outer | inner);
7802 }
7803 break;
7804
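      /* Editor's worked example: an 8-bit X whose only possibly
	 nonzero bit is the sign bit (op_nonzero == 0x80), shifted by
	 ASHIFTRT 2.  INNER becomes 0x80 >> 2 == 0x20; since bit
	 width - 1 - count == 5 is set, the two vacated high bits are
	 marked as well, giving 0xe0 -- exactly the bits of
	 -128 >> 2 == -32.  */
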
7805 case FFS:
7806 /* This is at most the number of bits in the mode. */
7807 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
7808 break;
7809
7810 case IF_THEN_ELSE:
7811 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7812 | nonzero_bits (XEXP (x, 2), mode));
7813 break;
7814
7815 default:
7816 break;
7167 }
7168
7169 return nonzero;
7170}
7817 }
7818
7819 return nonzero;
7820}
7821
7822/* See the macro definition above. */
7823#undef num_sign_bit_copies
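/* Illustrative sketch (editor's addition, standalone C with made-up
   names): the width bookkeeping used by the PLUS arm of nonzero_bits
   above.  With nz0 == 0x0ff0 (width 12, low0 == 4) and nz1 == 0x00ff
   (width 8, low1 == 0), a sum needs at most MAX (12, 8) + 1 == 13
   bits and keeps MIN (4, 0) == 0 known-zero low bits.  */

static int
floor_log2_sketch (unsigned long x)
{
  int n = -1;			/* floor_log2 (0) == -1, as in GCC */

  while (x != 0)
    x >>= 1, n++;
  return n;
}

static unsigned long
plus_nonzero_mask (unsigned long nz0, unsigned long nz1, int mode_width)
{
  int width0 = floor_log2_sketch (nz0) + 1;
  int width1 = floor_log2_sketch (nz1) + 1;
  int low0 = floor_log2_sketch (nz0 & -nz0);
  int low1 = floor_log2_sketch (nz1 & -nz1);
  int result_width = (width0 > width1 ? width0 : width1) + 1;
  int result_low = low0 < low1 ? low0 : low1;
  unsigned long nonzero = ~0UL;

  if (result_width < mode_width)
    nonzero &= (1UL << result_width) - 1;
  if (result_low > 0)
    nonzero &= ~((1UL << result_low) - 1);
  return nonzero;
}
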
7171
7172/* Return the number of bits at the high-order end of X that are known to
7173 be equal to the sign bit. X will be used in mode MODE; if MODE is
7174 VOIDmode, X will be used in its own mode. The returned value will always
7175 be between 1 and the number of bits in MODE. */
7176
7177static int
7178num_sign_bit_copies (x, mode)
7179 rtx x;
7180 enum machine_mode mode;
7181{
7182 enum rtx_code code = GET_CODE (x);
7183 int bitwidth;
7184 int num0, num1, result;
7185 unsigned HOST_WIDE_INT nonzero;
7186 rtx tem;
7187
7188 /* If we weren't given a mode, use the mode of X. If the mode is still
7189 VOIDmode, we don't know anything. Likewise if one of the modes is
7190 floating-point. */
7191
7192 if (mode == VOIDmode)
7193 mode = GET_MODE (x);
7194
7195 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7196 return 1;
7197
7198 bitwidth = GET_MODE_BITSIZE (mode);
7199
7824
7825/* Return the number of bits at the high-order end of X that are known to
7826 be equal to the sign bit. X will be used in mode MODE; if MODE is
7827 VOIDmode, X will be used in its own mode. The returned value will always
7828 be between 1 and the number of bits in MODE. */
7829
7830static int
7831num_sign_bit_copies (x, mode)
7832 rtx x;
7833 enum machine_mode mode;
7834{
7835 enum rtx_code code = GET_CODE (x);
7836 int bitwidth;
7837 int num0, num1, result;
7838 unsigned HOST_WIDE_INT nonzero;
7839 rtx tem;
7840
7841 /* If we weren't given a mode, use the mode of X. If the mode is still
7842 VOIDmode, we don't know anything. Likewise if one of the modes is
7843 floating-point. */
7844
7845 if (mode == VOIDmode)
7846 mode = GET_MODE (x);
7847
7848 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
7849 return 1;
7850
7851 bitwidth = GET_MODE_BITSIZE (mode);
7852
7200 /* For a smaller object, just ignore the high bits. */
7853 /* For a smaller object, just ignore the high bits. */
7201 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7202 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7203 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7204
7854 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7855 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7856 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7857
7858 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7859 {
7205#ifndef WORD_REGISTER_OPERATIONS
7206 /* If this machine does not do all register operations on the entire
7207 register and MODE is wider than the mode of X, we can say nothing
7208 at all about the high-order bits. */
7860#ifndef WORD_REGISTER_OPERATIONS
7861 /* If this machine does not do all register operations on the entire
7862 register and MODE is wider than the mode of X, we can say nothing
7863 at all about the high-order bits. */
7209 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7210 return 1;
7864 return 1;
7865#else
7866 /* Likewise on machines that do, if the mode of the object is smaller
7867 than a word and loads of that size don't sign extend, we can say
7868 nothing about the high order bits. */
7869 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7870#ifdef LOAD_EXTEND_OP
7871 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7211#endif
7872#endif
7873 )
7874 return 1;
7875#endif
7876 }
7212
7213 switch (code)
7214 {
7215 case REG:
7216
7217#ifdef POINTERS_EXTEND_UNSIGNED
7218 /* If pointers extend signed and this is a pointer in Pmode, say that
7219 all the bits above ptr_mode are known to be sign bit copies. */
7220 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7221 && REGNO_POINTER_FLAG (REGNO (x)))
7222 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7223#endif
7224
7225 if (reg_last_set_value[REGNO (x)] != 0
7226 && reg_last_set_mode[REGNO (x)] == mode
7877
7878 switch (code)
7879 {
7880 case REG:
7881
7882#ifdef POINTERS_EXTEND_UNSIGNED
7883 /* If pointers extend signed and this is a pointer in Pmode, say that
7884 all the bits above ptr_mode are known to be sign bit copies. */
7885 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7886 && REGNO_POINTER_FLAG (REGNO (x)))
7887 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7888#endif
7889
7890 if (reg_last_set_value[REGNO (x)] != 0
7891 && reg_last_set_mode[REGNO (x)] == mode
7227 && (reg_n_sets[REGNO (x)] == 1
7892 && (REG_N_SETS (REGNO (x)) == 1
7228 || reg_last_set_label[REGNO (x)] == label_tick)
7229 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7230 return reg_last_set_sign_bit_copies[REGNO (x)];
7231
7232 tem = get_last_value (x);
7233 if (tem != 0)
7234 return num_sign_bit_copies (tem, mode);
7235
7236 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7237 return reg_sign_bit_copies[REGNO (x)];
7238 break;
7239
7240 case MEM:
7241#ifdef LOAD_EXTEND_OP
7242 /* Some RISC machines sign-extend all loads smaller than a word. */
7243 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7244 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7245#endif
7246 break;
7247
7248 case CONST_INT:
7249 /* If the constant is negative, take its 1's complement and remask.
7250 Then see how many zero bits we have. */
7251 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7252 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7253 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7254 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7255
7256 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7257
7258 case SUBREG:
7259 /* If this is a SUBREG for a promoted object that is sign-extended
7260 and we are looking at it in a wider mode, we know that at least the
7261 high-order bits are known to be sign bit copies. */
7262
7263 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7264 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7265 num_sign_bit_copies (SUBREG_REG (x), mode));
7266
7893 || reg_last_set_label[REGNO (x)] == label_tick)
7894 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7895 return reg_last_set_sign_bit_copies[REGNO (x)];
7896
7897 tem = get_last_value (x);
7898 if (tem != 0)
7899 return num_sign_bit_copies (tem, mode);
7900
7901 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7902 return reg_sign_bit_copies[REGNO (x)];
7903 break;
7904
7905 case MEM:
7906#ifdef LOAD_EXTEND_OP
7907 /* Some RISC machines sign-extend all loads smaller than a word. */
7908 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7909 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
7910#endif
7911 break;
7912
7913 case CONST_INT:
7914 /* If the constant is negative, take its 1's complement and remask.
7915 Then see how many zero bits we have. */
7916 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
7917 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7918 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7919 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
7920
7921 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7922
7923 case SUBREG:
7924 /* If this is a SUBREG for a promoted object that is sign-extended
7925 and we are looking at it in a wider mode, we know that at least the
7926 high-order bits are known to be sign bit copies. */
7927
7928 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7929 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7930 num_sign_bit_copies (SUBREG_REG (x), mode));
7931
7267 /* For a smaller object, just ignore the high bits. */
7932 /* For a smaller object, just ignore the high bits. */
7268 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7269 {
7270 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7271 return MAX (1, (num0
7272 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7273 - bitwidth)));
7274 }
7275
7276#ifdef WORD_REGISTER_OPERATIONS
7277#ifdef LOAD_EXTEND_OP
7278 /* For paradoxical SUBREGs on machines where all register operations
7279 affect the entire register, just look inside. Note that we are
7280 passing MODE to the recursive call, so the number of sign bit copies
7281 will remain relative to that mode, not the inner mode. */
7282
7283 /* This works only if loads sign extend. Otherwise, if we get a
7284 reload for the inner part, it may be loaded from the stack, and
7285 then we lose all sign bit copies that existed before the store
7286 to the stack. */
7287
7288 if ((GET_MODE_SIZE (GET_MODE (x))
7289 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7290 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7291 return num_sign_bit_copies (SUBREG_REG (x), mode);
7292#endif
7293#endif
7294 break;
7295
7296 case SIGN_EXTRACT:
7297 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7298 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7299 break;
7300
7301 case SIGN_EXTEND:
7302 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7303 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7304
7305 case TRUNCATE:
7933 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7934 {
7935 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7936 return MAX (1, (num0
7937 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7938 - bitwidth)));
7939 }
7940
7941#ifdef WORD_REGISTER_OPERATIONS
7942#ifdef LOAD_EXTEND_OP
7943 /* For paradoxical SUBREGs on machines where all register operations
7944 affect the entire register, just look inside. Note that we are
7945 passing MODE to the recursive call, so the number of sign bit copies
7946 will remain relative to that mode, not the inner mode. */
7947
7948 /* This works only if loads sign extend. Otherwise, if we get a
7949 reload for the inner part, it may be loaded from the stack, and
7950 then we lose all sign bit copies that existed before the store
7951 to the stack. */
7952
7953 if ((GET_MODE_SIZE (GET_MODE (x))
7954 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7955 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
7956 return num_sign_bit_copies (SUBREG_REG (x), mode);
7957#endif
7958#endif
7959 break;
7960
7961 case SIGN_EXTRACT:
7962 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7963 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7964 break;
7965
7966 case SIGN_EXTEND:
7967 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7968 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7969
7970 case TRUNCATE:
7306 /* For a smaller object, just ignore the high bits. */
7971 /* For a smaller object, just ignore the high bits. */
7307 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7308 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7309 - bitwidth)));
7310
7311 case NOT:
7312 return num_sign_bit_copies (XEXP (x, 0), mode);
7313
7314 case ROTATE: case ROTATERT:
7315 /* If we are rotating left by a number of bits less than the number
7316 of sign bit copies, we can just subtract that amount from the
7317 number. */
7318 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7319 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7320 {
7321 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7322 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7323 : bitwidth - INTVAL (XEXP (x, 1))));
7324 }
7325 break;
7326
7327 case NEG:
7328 /* In general, this subtracts one sign bit copy. But if the value
7329 is known to be positive, the number of sign bit copies is the
7330 same as that of the input. Finally, if the input has just one bit
7331 that might be nonzero, all the bits are copies of the sign bit. */
7972 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7973 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7974 - bitwidth)));
7975
7976 case NOT:
7977 return num_sign_bit_copies (XEXP (x, 0), mode);
7978
7979 case ROTATE: case ROTATERT:
7980 /* If we are rotating left by a number of bits less than the number
7981 of sign bit copies, we can just subtract that amount from the
7982 number. */
7983 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7984 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7985 {
7986 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7987 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7988 : bitwidth - INTVAL (XEXP (x, 1))));
7989 }
7990 break;
7991
7992 case NEG:
7993 /* In general, this subtracts one sign bit copy. But if the value
7994 is known to be positive, the number of sign bit copies is the
7995 same as that of the input. Finally, if the input has just one bit
7996 that might be nonzero, all the bits are copies of the sign bit. */
7997 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7998 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7999 return num0 > 1 ? num0 - 1 : 1;
8000
7332 nonzero = nonzero_bits (XEXP (x, 0), mode);
7333 if (nonzero == 1)
7334 return bitwidth;
7335
8001 nonzero = nonzero_bits (XEXP (x, 0), mode);
8002 if (nonzero == 1)
8003 return bitwidth;
8004
7336 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7337 if (num0 > 1
8005 if (num0 > 1
7338 && bitwidth <= HOST_BITS_PER_WIDE_INT
7339 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7340 num0--;
7341
7342 return num0;
7343
7344 case IOR: case AND: case XOR:
7345 case SMIN: case SMAX: case UMIN: case UMAX:
7346 /* Logical operations will preserve the number of sign-bit copies.
7347 MIN and MAX operations always return one of the operands. */
7348 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7349 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7350 return MIN (num0, num1);
7351
7352 case PLUS: case MINUS:
7353 /* For addition and subtraction, we can have a 1-bit carry. However,
7354 if we are subtracting 1 from a positive number, there will not
7355 be such a carry. Furthermore, if the positive number is known to
7356 be 0 or 1, we know the result is either -1 or 0. */
7357
7358 if (code == PLUS && XEXP (x, 1) == constm1_rtx
7359 && bitwidth <= HOST_BITS_PER_WIDE_INT)
7360 {
7361 nonzero = nonzero_bits (XEXP (x, 0), mode);
7362 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7363 return (nonzero == 1 || nonzero == 0 ? bitwidth
7364 : bitwidth - floor_log2 (nonzero) - 1);
7365 }
7366
7367 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7368 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7369 return MAX (1, MIN (num0, num1) - 1);
7370
7371 case MULT:
7372 /* The number of bits of the product is the sum of the number of
7373 bits of both terms. However, unless one of the terms is known
7374 to be positive, we must allow for an additional bit since negating
7375 a negative number can remove one sign bit copy. */
7376
7377 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7378 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7379
7380 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7381 if (result > 0
8006 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
8007 num0--;
8008
8009 return num0;
8010
8011 case IOR: case AND: case XOR:
8012 case SMIN: case SMAX: case UMIN: case UMAX:
8013 /* Logical operations will preserve the number of sign-bit copies.
8014 MIN and MAX operations always return one of the operands. */
8015 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8016 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8017 return MIN (num0, num1);
8018
8019 case PLUS: case MINUS:
8020 /* For addition and subtraction, we can have a 1-bit carry. However,
8021 if we are subtracting 1 from a positive number, there will not
8022 be such a carry. Furthermore, if the positive number is known to
8023 be 0 or 1, we know the result is either -1 or 0. */
8024
8025 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8026 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8027 {
8028 nonzero = nonzero_bits (XEXP (x, 0), mode);
8029 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8030 return (nonzero == 1 || nonzero == 0 ? bitwidth
8031 : bitwidth - floor_log2 (nonzero) - 1);
8032 }
8033
8034 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8035 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8036 return MAX (1, MIN (num0, num1) - 1);
8037
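      /* Editor's worked example: two 8-bit values, each with three
	 sign-bit copies, lie in [-32, 31].  Their sum lies in
	 [-64, 62], which is guaranteed only two sign-bit copies --
	 MIN (3, 3) - 1, the 1-bit carry accounted for above.  */
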
8038 case MULT:
8039 /* The number of bits of the product is the sum of the number of
8040 bits of both terms. However, unless one of the terms is known
8041 to be positive, we must allow for an additional bit since negating
8042 a negative number can remove one sign bit copy. */
8043
8044 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8045 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8046
8047 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8048 if (result > 0
7382 && bitwidth <= HOST_BITS_PER_WIDE_INT
7383 && ((nonzero_bits (XEXP (x, 0), mode)
7384 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7385 && (nonzero_bits (XEXP (x, 1), mode)
7386 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
8049 && (bitwidth > HOST_BITS_PER_WIDE_INT
8050 || (((nonzero_bits (XEXP (x, 0), mode)
8051 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8052 && ((nonzero_bits (XEXP (x, 1), mode)
8053 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
7387 result--;
7388
7389 return MAX (1, result);
7390
7391 case UDIV:
8054 result--;
8055
8056 return MAX (1, result);
8057
8058 case UDIV:
7392 /* The result must be <= the first operand. */
7393 return num_sign_bit_copies (XEXP (x, 0), mode);
7394
8059 /* The result must be <= the first operand. If the first operand
8060 has the high bit set, we know nothing about the number of sign
8061 bit copies. */
8062 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8063 return 1;
8064 else if ((nonzero_bits (XEXP (x, 0), mode)
8065 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8066 return 1;
8067 else
8068 return num_sign_bit_copies (XEXP (x, 0), mode);
8069
7395 case UMOD:
7396 /* The result must be <= the second operand. */
7397 return num_sign_bit_copies (XEXP (x, 1), mode);
7398
7399 case DIV:
7400 /* Similar to unsigned division, except that we have to worry about
7401 the case where the divisor is negative, in which case we have
7402 to add 1. */
7403 result = num_sign_bit_copies (XEXP (x, 0), mode);
7404 if (result > 1
8070 case UMOD:
8071 /* The result must be <= the scond operand. */
8072 return num_sign_bit_copies (XEXP (x, 1), mode);
8073
8074 case DIV:
8075 /* Similar to unsigned division, except that we have to worry about
8076 the case where the divisor is negative, in which case we have
8077 to add 1. */
8078 result = num_sign_bit_copies (XEXP (x, 0), mode);
8079 if (result > 1
7405 && bitwidth <= HOST_BITS_PER_WIDE_INT
7406 && (nonzero_bits (XEXP (x, 1), mode)
7407 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7408 result --;
8080 && (bitwidth > HOST_BITS_PER_WIDE_INT
8081 || (nonzero_bits (XEXP (x, 1), mode)
8082 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8083 result--;
7409
7410 return result;
7411
7412 case MOD:
7413 result = num_sign_bit_copies (XEXP (x, 1), mode);
7414 if (result > 1
8084
8085 return result;
8086
8087 case MOD:
8088 result = num_sign_bit_copies (XEXP (x, 1), mode);
8089 if (result > 1
7415 && bitwidth <= HOST_BITS_PER_WIDE_INT
7416 && (nonzero_bits (XEXP (x, 1), mode)
7417 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7418 result --;
8090 && (bitwidth > HOST_BITS_PER_WIDE_INT
8091 || (nonzero_bits (XEXP (x, 1), mode)
8092 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8093 result--;
7419
7420 return result;
7421
7422 case ASHIFTRT:
7423 /* Shifts by a constant add to the number of bits equal to the
7424 sign bit. */
7425 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7426 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7427 && INTVAL (XEXP (x, 1)) > 0)
7428 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7429
7430 return num0;
7431
7432 case ASHIFT:
7433 /* Left shifts destroy copies. */
7434 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7435 || INTVAL (XEXP (x, 1)) < 0
7436 || INTVAL (XEXP (x, 1)) >= bitwidth)
7437 return 1;
7438
7439 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7440 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7441
7442 case IF_THEN_ELSE:
7443 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7444 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7445 return MIN (num0, num1);
7446
8094
8095 return result;
8096
8097 case ASHIFTRT:
8098 /* Shifts by a constant add to the number of bits equal to the
8099 sign bit. */
8100 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8101 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8102 && INTVAL (XEXP (x, 1)) > 0)
8103 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8104
8105 return num0;
8106
8107 case ASHIFT:
8108 /* Left shifts destroy copies. */
8109 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8110 || INTVAL (XEXP (x, 1)) < 0
8111 || INTVAL (XEXP (x, 1)) >= bitwidth)
8112 return 1;
8113
8114 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8115 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8116
8117 case IF_THEN_ELSE:
8118 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8119 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8120 return MIN (num0, num1);
8121
7447#if STORE_FLAG_VALUE == -1
7448 case EQ: case NE: case GE: case GT: case LE: case LT:
7449 case GEU: case GTU: case LEU: case LTU:
8122 case EQ: case NE: case GE: case GT: case LE: case LT:
8123 case GEU: case GTU: case LEU: case LTU:
7450 return bitwidth;
7451#endif
8124 if (STORE_FLAG_VALUE == -1)
8125 return bitwidth;
8126 break;
8127
8128 default:
8129 break;
7452 }
7453
7454 /* If we haven't been able to figure it out by one of the above rules,
7455 see if some of the high-order bits are known to be zero. If so,
7456 count those bits and return one less than that amount. If we can't
7457 safely compute the mask for this mode, always return BITWIDTH. */
7458
7459 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7460 return 1;
7461
7462 nonzero = nonzero_bits (x, mode);
7463 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7464 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7465}
7466
7467/* Return the number of "extended" bits there are in X, when interpreted
7468 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7469 unsigned quantities, this is the number of high-order zero bits.
7470 For signed quantities, this is the number of copies of the sign bit
7471	   minus 1.  In both cases, this function returns the number of "spare"
7472 bits. For example, if two quantities for which this function returns
7473 at least 1 are added, the addition is known not to overflow.
7474
7475 This function will always return 0 unless called during combine, which
7476 implies that it must be called from a define_split. */
7477
7478int
7479extended_count (x, mode, unsignedp)
7480 rtx x;
7481 enum machine_mode mode;
7482 int unsignedp;
7483{
7484 if (nonzero_sign_valid == 0)
7485 return 0;
7486
7487 return (unsignedp
7488 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7489 && (GET_MODE_BITSIZE (mode) - 1
7490 - floor_log2 (nonzero_bits (x, mode))))
7491 : num_sign_bit_copies (x, mode) - 1);
7492}
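/* Worked examples for extended_count, assuming an 8-bit MODE.
   Unsigned: if nonzero_bits (X, mode) == 0x0f (X <= 15), floor_log2
   gives 3, so the function returns 8 - 1 - 3 = 4 spare bits; two such
   values sum to at most 30, which cannot overflow.  Signed: if X has
   5 sign-bit copies (X in [-8, 7]), it returns 5 - 1 = 4; the sum of
   two such values lies in [-16, 14] and still fits in 8 bits.  */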
7493
7494/* This function is called from `simplify_shift_const' to merge two
7495 outer operations. Specifically, we have already found that we need
7496 to perform operation *POP0 with constant *PCONST0 at the outermost
7497 position. We would now like to also perform OP1 with constant CONST1
7498 (with *POP0 being done last).
7499
7500 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7501 the resulting operation. *PCOMP_P is set to 1 if we would need to
7502 complement the innermost operand, otherwise it is unchanged.
7503
7504 MODE is the mode in which the operation will be done. No bits outside
7505 the width of this mode matter. It is assumed that the width of this mode
7506 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7507
7508	   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
7509 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7510 result is simply *PCONST0.
7511
7512 If the resulting operation cannot be expressed as one operation, we
7513 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7514
7515static int
7516merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7517 enum rtx_code *pop0;
7518 HOST_WIDE_INT *pconst0;
7519 enum rtx_code op1;
7520 HOST_WIDE_INT const1;
7521 enum machine_mode mode;
7522 int *pcomp_p;
7523{
7524 enum rtx_code op0 = *pop0;
7525 HOST_WIDE_INT const0 = *pconst0;
7526 int width = GET_MODE_BITSIZE (mode);
7527
7528 const0 &= GET_MODE_MASK (mode);
7529 const1 &= GET_MODE_MASK (mode);
7530
7531 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7532 if (op0 == AND)
7533 const1 &= const0;
7534
7535 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7536 if OP0 is SET. */
7537
7538 if (op1 == NIL || op0 == SET)
7539 return 1;
7540
7541 else if (op0 == NIL)
7542 op0 = op1, const0 = const1;
7543
7544 else if (op0 == op1)
7545 {
7546 switch (op0)
7547 {
7548 case AND:
7549 const0 &= const1;
7550 break;
7551 case IOR:
7552 const0 |= const1;
7553 break;
7554 case XOR:
7555 const0 ^= const1;
7556 break;
7557 case PLUS:
7558 const0 += const1;
7559 break;
7560 case NEG:
7561 op0 = NIL;
7562 break;
8130 }
8131
8132 /* If we haven't been able to figure it out by one of the above rules,
8133 see if some of the high-order bits are known to be zero. If so,
8134 count those bits and return one less than that amount. If we can't
8135 safely compute the mask for this mode, always return BITWIDTH. */
8136
8137 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8138 return 1;
8139
8140 nonzero = nonzero_bits (x, mode);
8141 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8142 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8143}
8144
8145/* Return the number of "extended" bits there are in X, when interpreted
8146 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8147 unsigned quantities, this is the number of high-order zero bits.
8148 For signed quantities, this is the number of copies of the sign bit
8149	   minus 1.  In both cases, this function returns the number of "spare"
8150 bits. For example, if two quantities for which this function returns
8151 at least 1 are added, the addition is known not to overflow.
8152
8153 This function will always return 0 unless called during combine, which
8154 implies that it must be called from a define_split. */
8155
8156int
8157extended_count (x, mode, unsignedp)
8158 rtx x;
8159 enum machine_mode mode;
8160 int unsignedp;
8161{
8162 if (nonzero_sign_valid == 0)
8163 return 0;
8164
8165 return (unsignedp
8166 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8167 && (GET_MODE_BITSIZE (mode) - 1
8168 - floor_log2 (nonzero_bits (x, mode))))
8169 : num_sign_bit_copies (x, mode) - 1);
8170}
8171
8172/* This function is called from `simplify_shift_const' to merge two
8173 outer operations. Specifically, we have already found that we need
8174 to perform operation *POP0 with constant *PCONST0 at the outermost
8175 position. We would now like to also perform OP1 with constant CONST1
8176 (with *POP0 being done last).
8177
8178 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8179 the resulting operation. *PCOMP_P is set to 1 if we would need to
8180 complement the innermost operand, otherwise it is unchanged.
8181
8182 MODE is the mode in which the operation will be done. No bits outside
8183 the width of this mode matter. It is assumed that the width of this mode
8184 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8185
8186	   If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
8187 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8188 result is simply *PCONST0.
8189
8190 If the resulting operation cannot be expressed as one operation, we
8191 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8192
8193static int
8194merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8195 enum rtx_code *pop0;
8196 HOST_WIDE_INT *pconst0;
8197 enum rtx_code op1;
8198 HOST_WIDE_INT const1;
8199 enum machine_mode mode;
8200 int *pcomp_p;
8201{
8202 enum rtx_code op0 = *pop0;
8203 HOST_WIDE_INT const0 = *pconst0;
8204 int width = GET_MODE_BITSIZE (mode);
8205
8206 const0 &= GET_MODE_MASK (mode);
8207 const1 &= GET_MODE_MASK (mode);
8208
8209 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8210 if (op0 == AND)
8211 const1 &= const0;
8212
8213 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8214 if OP0 is SET. */
8215
8216 if (op1 == NIL || op0 == SET)
8217 return 1;
8218
8219 else if (op0 == NIL)
8220 op0 = op1, const0 = const1;
8221
8222 else if (op0 == op1)
8223 {
8224 switch (op0)
8225 {
8226 case AND:
8227 const0 &= const1;
8228 break;
8229 case IOR:
8230 const0 |= const1;
8231 break;
8232 case XOR:
8233 const0 ^= const1;
8234 break;
8235 case PLUS:
8236 const0 += const1;
8237 break;
8238 case NEG:
8239 op0 = NIL;
8240 break;
8241 default:
8242 break;
7563 }
7564 }
7565
7566 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7567 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7568 return 0;
7569
7570 /* If the two constants aren't the same, we can't do anything. The
7571 remaining six cases can all be done. */
7572 else if (const0 != const1)
7573 return 0;
7574
7575 else
7576 switch (op0)
7577 {
7578 case IOR:
7579 if (op1 == AND)
7580 /* (a & b) | b == b */
7581 op0 = SET;
7582 else /* op1 == XOR */
7583 /* (a ^ b) | b == a | b */
8243	    }
8244	  }
8245
8246	  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8247	  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8248	    return 0;
8249
8250	  /* If the two constants aren't the same, we can't do anything.  The
8251	     remaining six cases can all be done.  */
8252	  else if (const0 != const1)
8253	    return 0;
8254
8255	  else
8256	    switch (op0)
8257	      {
8258	      case IOR:
8259		if (op1 == AND)
8260	  /* (a & b) | b == b */
8261	  op0 = SET;
8262	else /* op1 == XOR */
8263	  /* (a ^ b) | b == a | b */
8264	  {;}
7584	  ;
7585 break;
7586
7587 case XOR:
7588 if (op1 == AND)
7589 /* (a & b) ^ b == (~a) & b */
7590 op0 = AND, *pcomp_p = 1;
7591 else /* op1 == IOR */
7592 /* (a | b) ^ b == a & ~b */
7593 op0 = AND, *pconst0 = ~ const0;
7594 break;
7595
7596 case AND:
7597 if (op1 == IOR)
7598 /* (a | b) & b == b */
7599 op0 = SET;
7600 else /* op1 == XOR */
7601	  /* (a ^ b) & b == (~a) & b */
7602 *pcomp_p = 1;
7603 break;
7604	      }
8265	break;
8266
8267	      case XOR:
8268	if (op1 == AND)
8269	  /* (a & b) ^ b == (~a) & b */
8270	  op0 = AND, *pcomp_p = 1;
8271	else /* op1 == IOR */
8272	  /* (a | b) ^ b == a & ~b */
8273	  op0 = AND, *pconst0 = ~ const0;
8274	break;
8275
8276	      case AND:
8277	if (op1 == IOR)
8278	  /* (a | b) & b == b */
8279	  op0 = SET;
8280	else /* op1 == XOR */
8281	  /* (a ^ b) & b == (~a) & b */
8282	  *pcomp_p = 1;
8283	break;
8284	      default:
8285	break;
7605
7606 /* Check for NO-OP cases. */
7607 const0 &= GET_MODE_MASK (mode);
7608 if (const0 == 0
7609 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7610 op0 = NIL;
7611 else if (const0 == 0 && op0 == AND)
7612 op0 = SET;
7613 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7614 op0 = NIL;
7615
7616 /* If this would be an entire word for the target, but is not for
7617 the host, then sign-extend on the host so that the number will look
7618 the same way on the host that it would on the target.
7619
7620 For example, when building a 64 bit alpha hosted 32 bit sparc
7621 targeted compiler, then we want the 32 bit unsigned value -1 to be
7622 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7623	     The latter confuses the sparc backend.  */
7624
7625 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7626 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
7627 const0 |= ((HOST_WIDE_INT) (-1) << width);
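/* For example, with BITS_PER_WORD == 32 and a 64 bit HOST_WIDE_INT,
   a CONST0 of 0xffffffff has bit 31 set, so the statement above
   widens it to 0xffffffffffffffff -- the value -1 on the host,
   matching the 32 bit -1 on the target.  */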
7628
7629 *pop0 = op0;
7630 *pconst0 = const0;
7631
7632 return 1;
7633}
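/* The six combinations that survive to the switch above obey simple
   boolean identities.  The sketch below (illustrative only, not part
   of combine itself) checks them with ordinary unsigned arithmetic;
   B plays the role of the constant shared by both operations and A
   the unknown inner operand.  */
#if 0
#include <assert.h>

static void
check_merge_identities (unsigned a, unsigned b)
{
  assert (((a & b) | b) == b);          /* IOR applied after AND */
  assert (((a ^ b) | b) == (a | b));    /* IOR applied after XOR */
  assert (((a & b) ^ b) == (~a & b));   /* XOR applied after AND */
  assert (((a | b) ^ b) == (a & ~b));   /* XOR applied after IOR */
  assert (((a | b) & b) == b);          /* AND applied after IOR */
  assert (((a ^ b) & b) == (~a & b));   /* AND applied after XOR */
}
#endif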
7634
7635/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7636 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7637 that we started with.
7638
7639 The shift is normally computed in the widest mode we find in VAROP, as
7640 long as it isn't a different number of words than RESULT_MODE. Exceptions
7641	   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
7642
7643static rtx
7644simplify_shift_const (x, code, result_mode, varop, count)
7645 rtx x;
7646 enum rtx_code code;
7647 enum machine_mode result_mode;
7648 rtx varop;
7649 int count;
7650{
7651 enum rtx_code orig_code = code;
7652 int orig_count = count;
7653 enum machine_mode mode = result_mode;
7654 enum machine_mode shift_mode, tmode;
7655 int mode_words
7656 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7657 /* We form (outer_op (code varop count) (outer_const)). */
7658 enum rtx_code outer_op = NIL;
7659 HOST_WIDE_INT outer_const = 0;
7660 rtx const_rtx;
7661 int complement_p = 0;
7662 rtx new;
7663
7664 /* If we were given an invalid count, don't do anything except exactly
7665 what was requested. */
7666
7667 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7668 {
7669 if (x)
7670 return x;
7671
8286 }
8287
8288 /* Check for NO-OP cases. */
8289 const0 &= GET_MODE_MASK (mode);
8290 if (const0 == 0
8291 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8292 op0 = NIL;
8293 else if (const0 == 0 && op0 == AND)
8294 op0 = SET;
8295 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
8296 op0 = NIL;
8297
8298 /* If this would be an entire word for the target, but is not for
8299 the host, then sign-extend on the host so that the number will look
8300 the same way on the host that it would on the target.
8301
8302 For example, when building a 64 bit alpha hosted 32 bit sparc
8303 targeted compiler, then we want the 32 bit unsigned value -1 to be
8304 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8305	     The latter confuses the sparc backend.  */
8306
8307 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8308 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8309 const0 |= ((HOST_WIDE_INT) (-1) << width);
8310
8311 *pop0 = op0;
8312 *pconst0 = const0;
8313
8314 return 1;
8315}
8316
8317/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8318 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8319 that we started with.
8320
8321 The shift is normally computed in the widest mode we find in VAROP, as
8322 long as it isn't a different number of words than RESULT_MODE. Exceptions
8323	   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
8324
8325static rtx
8326simplify_shift_const (x, code, result_mode, varop, count)
8327 rtx x;
8328 enum rtx_code code;
8329 enum machine_mode result_mode;
8330 rtx varop;
8331 int count;
8332{
8333 enum rtx_code orig_code = code;
8334 int orig_count = count;
8335 enum machine_mode mode = result_mode;
8336 enum machine_mode shift_mode, tmode;
8337 int mode_words
8338 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8339 /* We form (outer_op (code varop count) (outer_const)). */
8340 enum rtx_code outer_op = NIL;
8341 HOST_WIDE_INT outer_const = 0;
8342 rtx const_rtx;
8343 int complement_p = 0;
8344 rtx new;
8345
8346 /* If we were given an invalid count, don't do anything except exactly
8347 what was requested. */
8348
8349 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8350 {
8351 if (x)
8352 return x;
8353
7672 return gen_rtx (code, mode, varop, GEN_INT (count));
8354 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
7673 }
7674
7675 /* Unless one of the branches of the `if' in this loop does a `continue',
7676 we will `break' the loop after the `if'. */
7677
7678 while (count != 0)
7679 {
7680 /* If we have an operand of (clobber (const_int 0)), just return that
7681 value. */
7682 if (GET_CODE (varop) == CLOBBER)
7683 return varop;
7684
7685 /* If we discovered we had to complement VAROP, leave. Making a NOT
7686 here would cause an infinite loop. */
7687 if (complement_p)
7688 break;
7689
7690 /* Convert ROTATERT to ROTATE. */
7691 if (code == ROTATERT)
7692 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7693
7694	      /* We need to determine what mode we will do the shift in.  If the
7695		 shift is a right shift or a ROTATE, we must always do it in the mode
7696		 it was originally done in.  Otherwise, we can do it in MODE, the
7697	 widest mode encountered.  */
8355	    }
8356
8357	  /* Unless one of the branches of the `if' in this loop does a `continue',
8358	     we will `break' the loop after the `if'.  */
8359
8360	  while (count != 0)
8361	    {
8362	      /* If we have an operand of (clobber (const_int 0)), just return that
8363		 value.  */
8364	      if (GET_CODE (varop) == CLOBBER)
8365		return varop;
8366
8367	      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8368		 here would cause an infinite loop.  */
8369	      if (complement_p)
8370		break;
8371
8372	      /* Convert ROTATERT to ROTATE.  */
8373	      if (code == ROTATERT)
8374		code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8375
8376	      /* We need to determine what mode we will do the shift in.  If the
8377	 shift is a right shift or a ROTATE, we must always do it in the mode
8378	 it was originally done in.  Otherwise, we can do it in MODE, the
8379	 widest mode encountered.  */
7698 shift_mode
7699 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
7700 ? result_mode : mode);
7701
7702 /* Handle cases where the count is greater than the size of the mode
7703	 minus 1.  For ASHIFTRT, use the size minus one as the count (this can
7704 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7705 take the count modulo the size. For other shifts, the result is
7706 zero.
7707
7708 Since these shifts are being produced by the compiler by combining
7709 multiple operations, each of which are defined, we know what the
7710	 multiple operations, each of which is defined, we know what the
7711
7712 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7713 {
7714 if (code == ASHIFTRT)
7715 count = GET_MODE_BITSIZE (shift_mode) - 1;
7716 else if (code == ROTATE || code == ROTATERT)
7717 count %= GET_MODE_BITSIZE (shift_mode);
7718 else
7719 {
7720 /* We can't simply return zero because there may be an
7721 outer op. */
7722 varop = const0_rtx;
7723 count = 0;
7724 break;
7725 }
7726 }
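/* For example, in SImode: (ashiftrt X 34) is canonicalized to
   (ashiftrt X 31), since an arithmetic shift by more than the width
   only replicates the sign bit further; (rotate X 37) becomes
   (rotate X 5); and (lshiftrt X 34) collapses to zero, kept as
   VAROP = const0_rtx so that any outer operation still applies.  */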
7727
8380	      shift_mode
8381	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8382	   ? result_mode : mode);
8383
8384	      /* Handle cases where the count is greater than the size of the mode
8385	 minus 1.  For ASHIFTRT, use the size minus one as the count (this can
8386	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8387	 take the count modulo the size.  For other shifts, the result is
8388	 zero.
8389
8390	 Since these shifts are being produced by the compiler by combining
8391	 multiple operations, each of which is defined, we know what the
8392	 result is supposed to be.  */
8393
8394	      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8395	{
8396	  if (code == ASHIFTRT)
8397	    count = GET_MODE_BITSIZE (shift_mode) - 1;
8398	  else if (code == ROTATE || code == ROTATERT)
8399	    count %= GET_MODE_BITSIZE (shift_mode);
8400	  else
8401	    {
8402	      /* We can't simply return zero because there may be an
8403	 outer op.  */
8404	      varop = const0_rtx;
8405	      count = 0;
8406	      break;
8407	    }
8408	}
8409
8410	      /* Negative counts are invalid and should not have been made (a
8411	 programmer-specified negative count should have been handled
8412	 above).  */
7728	      /* Negative counts are invalid and should not have been made (a
7729	 programmer-specified negative count should have been handled
7730	 above).  */
7731 else if (count < 0)
7732 abort ();
7733
7734 /* An arithmetic right shift of a quantity known to be -1 or 0
7735 is a no-op. */
7736 if (code == ASHIFTRT
7737 && (num_sign_bit_copies (varop, shift_mode)
7738 == GET_MODE_BITSIZE (shift_mode)))
7739 {
7740 count = 0;
7741 break;
7742 }
7743
7744 /* If we are doing an arithmetic right shift and discarding all but
7745 the sign bit copies, this is equivalent to doing a shift by the
7746 bitsize minus one. Convert it into that shift because it will often
7747 allow other simplifications. */
7748
7749 if (code == ASHIFTRT
7750 && (count + num_sign_bit_copies (varop, shift_mode)
7751 >= GET_MODE_BITSIZE (shift_mode)))
7752 count = GET_MODE_BITSIZE (shift_mode) - 1;
7753
7754 /* We simplify the tests below and elsewhere by converting
7755 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7756	 `make_compound_operation' will convert it to an ASHIFTRT for
7757	 those machines (such as Vax) that don't have an LSHIFTRT.  */
7758 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7759 && code == ASHIFTRT
7760 && ((nonzero_bits (varop, shift_mode)
7761 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7762 == 0))
7763 code = LSHIFTRT;
7764
7765 switch (GET_CODE (varop))
7766 {
7767 case SIGN_EXTEND:
7768 case ZERO_EXTEND:
7769 case SIGN_EXTRACT:
7770 case ZERO_EXTRACT:
7771 new = expand_compound_operation (varop);
7772 if (new != varop)
7773 {
7774 varop = new;
7775 continue;
7776 }
7777 break;
7778
7779 case MEM:
7780 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7781 minus the width of a smaller mode, we can do this with a
7782 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7783 if ((code == ASHIFTRT || code == LSHIFTRT)
7784 && ! mode_dependent_address_p (XEXP (varop, 0))
7785 && ! MEM_VOLATILE_P (varop)
7786 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7787 MODE_INT, 1)) != BLKmode)
7788 {
8413	      else if (count < 0)
8414	abort ();
8415
8416	      /* An arithmetic right shift of a quantity known to be -1 or 0
8417	 is a no-op.  */
8418	      if (code == ASHIFTRT
8419	  && (num_sign_bit_copies (varop, shift_mode)
8420	      == GET_MODE_BITSIZE (shift_mode)))
8421	{
8422	  count = 0;
8423	  break;
8424	}
8425
8426	      /* If we are doing an arithmetic right shift and discarding all but
8427	 the sign bit copies, this is equivalent to doing a shift by the
8428	 bitsize minus one.  Convert it into that shift because it will often
8429	 allow other simplifications.  */
8430
8431	      if (code == ASHIFTRT
8432	  && (count + num_sign_bit_copies (varop, shift_mode)
8433	      >= GET_MODE_BITSIZE (shift_mode)))
8434	count = GET_MODE_BITSIZE (shift_mode) - 1;
8435
8436	      /* We simplify the tests below and elsewhere by converting
8437	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8438	 `make_compound_operation' will convert it to an ASHIFTRT for
8439	 those machines (such as Vax) that don't have an LSHIFTRT.  */
8440	      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8441	  && code == ASHIFTRT
8442	  && ((nonzero_bits (varop, shift_mode)
8443	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8444	      == 0))
8445	code = LSHIFTRT;
8446
8447	      switch (GET_CODE (varop))
8448	{
8449	case SIGN_EXTEND:
8450	case ZERO_EXTEND:
8451	case SIGN_EXTRACT:
8452	case ZERO_EXTRACT:
8453	  new = expand_compound_operation (varop);
8454	  if (new != varop)
8455	    {
8456	      varop = new;
8457	      continue;
8458	    }
8459	  break;
8460
8461	case MEM:
8462	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8463	     minus the width of a smaller mode, we can do this with a
8464	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
8465	  if ((code == ASHIFTRT || code == LSHIFTRT)
8466	      && ! mode_dependent_address_p (XEXP (varop, 0))
8467	      && ! MEM_VOLATILE_P (varop)
8468	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8469	 MODE_INT, 1)) != BLKmode)
8470	    {
8471	      if (BYTES_BIG_ENDIAN)
8472	new = gen_rtx_MEM (tmode, XEXP (varop, 0));
8473	      else
8474	new = gen_rtx_MEM (tmode,
8475	   plus_constant (XEXP (varop, 0),
8476	  count / BITS_PER_UNIT));
7789	      if (BYTES_BIG_ENDIAN)
7790	new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7791	      else
7792	new = gen_rtx (MEM, tmode,
7793	       plus_constant (XEXP (varop, 0),
7794	      count / BITS_PER_UNIT));
7795 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7796 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7797 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7798 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7799 : ZERO_EXTEND, mode, new);
7800 count = 0;
7801 continue;
7802 }
7803 break;
7804
7805 case USE:
7806 /* Similar to the case above, except that we can only do this if
7807 the resulting mode is the same as that of the underlying
7808 MEM and adjust the address depending on the *bits* endianness
7809 because of the way that bit-field extract insns are defined. */
7810 if ((code == ASHIFTRT || code == LSHIFTRT)
7811 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7812 MODE_INT, 1)) != BLKmode
7813 && tmode == GET_MODE (XEXP (varop, 0)))
7814 {
7815 if (BITS_BIG_ENDIAN)
7816 new = XEXP (varop, 0);
7817 else
7818 {
7819 new = copy_rtx (XEXP (varop, 0));
7820 SUBST (XEXP (new, 0),
7821 plus_constant (XEXP (new, 0),
7822 count / BITS_PER_UNIT));
7823 }
7824
7825 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7826 : ZERO_EXTEND, mode, new);
7827 count = 0;
7828 continue;
7829 }
7830 break;
7831
7832 case SUBREG:
7833 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7834 the same number of words as what we've seen so far. Then store
7835 the widest mode in MODE. */
7836 if (subreg_lowpart_p (varop)
7837 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7838 > GET_MODE_SIZE (GET_MODE (varop)))
7839 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7840 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7841 == mode_words))
7842 {
7843 varop = SUBREG_REG (varop);
7844 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7845 mode = GET_MODE (varop);
7846 continue;
7847 }
7848 break;
7849
7850 case MULT:
7851 /* Some machines use MULT instead of ASHIFT because MULT
7852 is cheaper. But it is still better on those machines to
7853 merge two shifts into one. */
7854 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7855 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7856 {
7857 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7858	 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7859 continue;
7860 }
7861 break;
7862
7863 case UDIV:
7864 /* Similar, for when divides are cheaper. */
7865 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7866 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7867 {
7868 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7869 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7870 continue;
7871 }
7872 break;
7873
7874 case ASHIFTRT:
7875 /* If we are extracting just the sign bit of an arithmetic right
7876 shift, that shift is not needed. */
7877 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7878 {
7879 varop = XEXP (varop, 0);
7880 continue;
7881 }
7882
8477 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8478 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8479 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
8480 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8481 : ZERO_EXTEND, mode, new);
8482 count = 0;
8483 continue;
8484 }
8485 break;
8486
8487 case USE:
8488 /* Similar to the case above, except that we can only do this if
8489 the resulting mode is the same as that of the underlying
8490 MEM and adjust the address depending on the *bits* endianness
8491 because of the way that bit-field extract insns are defined. */
8492 if ((code == ASHIFTRT || code == LSHIFTRT)
8493 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8494 MODE_INT, 1)) != BLKmode
8495 && tmode == GET_MODE (XEXP (varop, 0)))
8496 {
8497 if (BITS_BIG_ENDIAN)
8498 new = XEXP (varop, 0);
8499 else
8500 {
8501 new = copy_rtx (XEXP (varop, 0));
8502 SUBST (XEXP (new, 0),
8503 plus_constant (XEXP (new, 0),
8504 count / BITS_PER_UNIT));
8505 }
8506
8507 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8508 : ZERO_EXTEND, mode, new);
8509 count = 0;
8510 continue;
8511 }
8512 break;
8513
8514 case SUBREG:
8515 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8516 the same number of words as what we've seen so far. Then store
8517 the widest mode in MODE. */
8518 if (subreg_lowpart_p (varop)
8519 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8520 > GET_MODE_SIZE (GET_MODE (varop)))
8521 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8522 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8523 == mode_words))
8524 {
8525 varop = SUBREG_REG (varop);
8526 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8527 mode = GET_MODE (varop);
8528 continue;
8529 }
8530 break;
8531
8532 case MULT:
8533 /* Some machines use MULT instead of ASHIFT because MULT
8534 is cheaper. But it is still better on those machines to
8535 merge two shifts into one. */
8536 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8537 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8538 {
8539 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
8540	 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8541 continue;
8542 }
8543 break;
8544
8545 case UDIV:
8546 /* Similar, for when divides are cheaper. */
8547 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8548 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8549 {
8550 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
8551 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8552 continue;
8553 }
8554 break;
8555
8556 case ASHIFTRT:
8557 /* If we are extracting just the sign bit of an arithmetic right
8558 shift, that shift is not needed. */
8559 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8560 {
8561 varop = XEXP (varop, 0);
8562 continue;
8563 }
8564
7883 /* ... fall through ... */
8565 /* ... fall through ... */
7884
7885 case LSHIFTRT:
7886 case ASHIFT:
7887 case ROTATE:
7888 /* Here we have two nested shifts. The result is usually the
7889 AND of a new shift with a mask. We compute the result below. */
7890 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7891 && INTVAL (XEXP (varop, 1)) >= 0
7892 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7893 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7894 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7895 {
7896 enum rtx_code first_code = GET_CODE (varop);
7897 int first_count = INTVAL (XEXP (varop, 1));
7898 unsigned HOST_WIDE_INT mask;
7899 rtx mask_rtx;
7900
7901 /* We have one common special case. We can't do any merging if
7902 the inner code is an ASHIFTRT of a smaller mode. However, if
7903 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7904 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7905 we can convert it to
7906	 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7907 This simplifies certain SIGN_EXTEND operations. */
7908 if (code == ASHIFT && first_code == ASHIFTRT
7909 && (GET_MODE_BITSIZE (result_mode)
7910 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7911 {
7912 /* C3 has the low-order C1 bits zero. */
7913
7914 mask = (GET_MODE_MASK (mode)
7915 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7916
7917 varop = simplify_and_const_int (NULL_RTX, result_mode,
7918 XEXP (varop, 0), mask);
7919 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7920 varop, count);
7921 count = first_count;
7922 code = ASHIFTRT;
7923 continue;
7924 }
7925
7926 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7927 than C1 high-order bits equal to the sign bit, we can convert
7928	 this to either an ASHIFT or an ASHIFTRT depending on the
7929 two counts.
7930
7931 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7932
7933 if (code == ASHIFTRT && first_code == ASHIFT
7934 && GET_MODE (varop) == shift_mode
7935 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7936 > first_count))
7937 {
7938 count -= first_count;
7939 if (count < 0)
7940 count = - count, code = ASHIFT;
7941 varop = XEXP (varop, 0);
7942 continue;
7943 }
7944
7945 /* There are some cases we can't do. If CODE is ASHIFTRT,
7946 we can only do this if FIRST_CODE is also ASHIFTRT.
7947
7948 We can't do the case when CODE is ROTATE and FIRST_CODE is
7949 ASHIFTRT.
7950
7951 If the mode of this shift is not the mode of the outer shift,
7952 we can't do this if either shift is a right shift or ROTATE.
7953
7954 Finally, we can't do any of these if the mode is too wide
7955 unless the codes are the same.
7956
7957 Handle the case where the shift codes are the same
7958 first. */
7959
7960 if (code == first_code)
7961 {
7962 if (GET_MODE (varop) != result_mode
7963 && (code == ASHIFTRT || code == LSHIFTRT
7964 || code == ROTATE))
7965 break;
7966
7967 count += first_count;
7968 varop = XEXP (varop, 0);
7969 continue;
7970 }
7971
7972 if (code == ASHIFTRT
7973 || (code == ROTATE && first_code == ASHIFTRT)
7974 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7975 || (GET_MODE (varop) != result_mode
7976 && (first_code == ASHIFTRT || first_code == LSHIFTRT
7977 || first_code == ROTATE
7978 || code == ROTATE)))
7979 break;
7980
7981 /* To compute the mask to apply after the shift, shift the
7982 nonzero bits of the inner shift the same way the
7983 outer shift will. */
7984
7985 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7986
7987 mask_rtx
7988 = simplify_binary_operation (code, result_mode, mask_rtx,
7989 GEN_INT (count));
7990
7991 /* Give up if we can't compute an outer operation to use. */
7992 if (mask_rtx == 0
7993 || GET_CODE (mask_rtx) != CONST_INT
7994 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7995 INTVAL (mask_rtx),
7996 result_mode, &complement_p))
7997 break;
7998
7999 /* If the shifts are in the same direction, we add the
8000 counts. Otherwise, we subtract them. */
8001 if ((code == ASHIFTRT || code == LSHIFTRT)
8002 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8003 count += first_count;
8004 else
8005 count -= first_count;
8006
8007 /* If COUNT is positive, the new shift is usually CODE,
8008 except for the two exceptions below, in which case it is
8009 FIRST_CODE. If the count is negative, FIRST_CODE should
8010	 always be used.  */
8011 if (count > 0
8012 && ((first_code == ROTATE && code == ASHIFT)
8013 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8014 code = first_code;
8015 else if (count < 0)
8016 code = first_code, count = - count;
8017
8018 varop = XEXP (varop, 0);
8019 continue;
8020 }
8021
8022 /* If we have (A << B << C) for any shift, we can convert this to
8023 (A << C << B). This wins if A is a constant. Only try this if
8024 B is not a constant. */
8025
8026 else if (GET_CODE (varop) == code
8027 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8028 && 0 != (new
8029 = simplify_binary_operation (code, mode,
8030 XEXP (varop, 0),
8031 GEN_INT (count))))
8032 {
8033 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8034 count = 0;
8035 continue;
8036 }
8037 break;
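/* For example, (ashift (ashift (const_int 1) B) 3) becomes
   (ashift (const_int 8) B): the two counts commute, and the shift of
   the constant 1 by 3 folds immediately.  */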
8038
8039 case NOT:
8040 /* Make this fit the case below. */
8041 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8042 GEN_INT (GET_MODE_MASK (mode)));
8043 continue;
8044
8045 case IOR:
8046 case AND:
8047 case XOR:
8048 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8049 with C the size of VAROP - 1 and the shift is logical if
8050 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8051 we have an (le X 0) operation. If we have an arithmetic shift
8052 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8053 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8054
8055 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8056 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8057 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8058 && (code == LSHIFTRT || code == ASHIFTRT)
8059 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8060 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8061 {
8062 count = 0;
8063 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8064 const0_rtx);
8065
8066 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8067 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8068
8069 continue;
8070 }
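/* A worked example, assuming QImode and STORE_FLAG_VALUE == 1 with a
   logical shift: in (lshiftrt (ior (plus X -1) X) 7), if X == 0 then
   (-1 | 0) = 0xff and the shift yields 1; if X == 3 then (2 | 3) = 3
   and the shift yields 0.  In general the sign bit of (X - 1) | X is
   set exactly when X <= 0, so the whole expression is (le X 0).  */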
8071
8072 /* If we have (shift (logical)), move the logical to the outside
8073 to allow it to possibly combine with another logical and the
8074 shift to combine with another shift. This also canonicalizes to
8075 what a ZERO_EXTRACT looks like. Also, some machines have
8076 (and (shift)) insns. */
8077
8078 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8079 && (new = simplify_binary_operation (code, result_mode,
8080 XEXP (varop, 1),
8081 GEN_INT (count))) != 0
8082	      && GET_CODE (new) == CONST_INT
8083 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8084 INTVAL (new), result_mode, &complement_p))
8085 {
8086 varop = XEXP (varop, 0);
8087 continue;
8088 }
8089
8090 /* If we can't do that, try to simplify the shift in each arm of the
8091 logical expression, make a new logical expression, and apply
8092 the inverse distributive law. */
8093 {
8094 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8095 XEXP (varop, 0), count);
8096 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8097 XEXP (varop, 1), count);
8098
8099 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8100 varop = apply_distributive_law (varop);
8101
8102 count = 0;
8103 }
8104 break;
8105
8106 case EQ:
8107	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8108 says that the sign bit can be tested, FOO has mode MODE, C is
8109 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8110 that may be nonzero. */
8111 if (code == LSHIFTRT
8112 && XEXP (varop, 1) == const0_rtx
8113 && GET_MODE (XEXP (varop, 0)) == result_mode
8114 && count == GET_MODE_BITSIZE (result_mode) - 1
8115 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8116 && ((STORE_FLAG_VALUE
8117 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8118 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8119 && merge_outer_ops (&outer_op, &outer_const, XOR,
8120 (HOST_WIDE_INT) 1, result_mode,
8121 &complement_p))
8122 {
8123 varop = XEXP (varop, 0);
8124 count = 0;
8125 continue;
8126 }
8127 break;
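/* For example, with STORE_FLAG_VALUE == -1 in SImode and FOO known to
   be 0 or 1: (eq FOO 0) is 0xffffffff when FOO == 0 and 0 when
   FOO == 1, so (lshiftrt (eq FOO 0) 31) is 1 when FOO == 0 and 0 when
   FOO == 1 -- exactly (xor FOO 1).  */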
8128
8129 case NEG:
8130 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8131 than the number of bits in the mode is equivalent to A. */
8132 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8133 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8134 {
8135 varop = XEXP (varop, 0);
8136 count = 0;
8137 continue;
8138 }
8139
8140 /* NEG commutes with ASHIFT since it is multiplication. Move the
8141 NEG outside to allow shifts to combine. */
8142 if (code == ASHIFT
8143 && merge_outer_ops (&outer_op, &outer_const, NEG,
8144 (HOST_WIDE_INT) 0, result_mode,
8145 &complement_p))
8146 {
8147 varop = XEXP (varop, 0);
8148 continue;
8149 }
8150 break;
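/* For example, in SImode with A known to be 0 or 1:
   (lshiftrt (neg A) 31) is 0xffffffff >> 31 = 1 when A == 1 and
   0 >> 31 = 0 when A == 0, so the NEG and the shift cancel out.  */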
8151
8152 case PLUS:
8153 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8154 is one less than the number of bits in the mode is
8155 equivalent to (xor A 1). */
8156 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8157 && XEXP (varop, 1) == constm1_rtx
8158 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8159 && merge_outer_ops (&outer_op, &outer_const, XOR,
8160 (HOST_WIDE_INT) 1, result_mode,
8161 &complement_p))
8162 {
8163 count = 0;
8164 varop = XEXP (varop, 0);
8165 continue;
8166 }
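/* For example, in SImode with A known to be 0 or 1:
   (lshiftrt (plus A -1) 31) is 0xffffffff >> 31 = 1 when A == 0 and
   0 >> 31 = 0 when A == 1, i.e. (xor A 1).  */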
8167
8168 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8169 that might be nonzero in BAR are those being shifted out and those
8170 bits are known zero in FOO, we can replace the PLUS with FOO.
8171 Similarly in the other operand order. This code occurs when
8172 we are computing the size of a variable-size array. */
8173
8174 if ((code == ASHIFTRT || code == LSHIFTRT)
8175 && count < HOST_BITS_PER_WIDE_INT
8176 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8177 && (nonzero_bits (XEXP (varop, 1), result_mode)
8178 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8179 {
8180 varop = XEXP (varop, 0);
8181 continue;
8182 }
8183 else if ((code == ASHIFTRT || code == LSHIFTRT)
8184 && count < HOST_BITS_PER_WIDE_INT
8185 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8186 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8187 >> count)
8188 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8189 & nonzero_bits (XEXP (varop, 1),
8190 result_mode)))
8191 {
8192 varop = XEXP (varop, 1);
8193 continue;
8194 }
8195
8196 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8197 if (code == ASHIFT
8198 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8199 && (new = simplify_binary_operation (ASHIFT, result_mode,
8200 XEXP (varop, 1),
8201 GEN_INT (count))) != 0
8202	      && GET_CODE (new) == CONST_INT
8203 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8204 INTVAL (new), result_mode, &complement_p))
8205 {
8206 varop = XEXP (varop, 0);
8207 continue;
8208 }
8209 break;
8210
8211 case MINUS:
8212	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8213 with C the size of VAROP - 1 and the shift is logical if
8214 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8215 we have a (gt X 0) operation. If the shift is arithmetic with
8216 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8217 we have a (neg (gt X 0)) operation. */
8218
8566
8567 case LSHIFTRT:
8568 case ASHIFT:
8569 case ROTATE:
8570 /* Here we have two nested shifts. The result is usually the
8571 AND of a new shift with a mask. We compute the result below. */
8572 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8573 && INTVAL (XEXP (varop, 1)) >= 0
8574 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8575 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8576 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8577 {
8578 enum rtx_code first_code = GET_CODE (varop);
8579 int first_count = INTVAL (XEXP (varop, 1));
8580 unsigned HOST_WIDE_INT mask;
8581 rtx mask_rtx;
8582
8583 /* We have one common special case. We can't do any merging if
8584 the inner code is an ASHIFTRT of a smaller mode. However, if
8585 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8586 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8587 we can convert it to
8588	 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8589 This simplifies certain SIGN_EXTEND operations. */
8590 if (code == ASHIFT && first_code == ASHIFTRT
8591 && (GET_MODE_BITSIZE (result_mode)
8592 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8593 {
8594 /* C3 has the low-order C1 bits zero. */
8595
8596 mask = (GET_MODE_MASK (mode)
8597 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
8598
8599 varop = simplify_and_const_int (NULL_RTX, result_mode,
8600 XEXP (varop, 0), mask);
8601 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8602 varop, count);
8603 count = first_count;
8604 code = ASHIFTRT;
8605 continue;
8606 }
8607
8608 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8609 than C1 high-order bits equal to the sign bit, we can convert
8610	 this to either an ASHIFT or an ASHIFTRT depending on the
8611 two counts.
8612
8613 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8614
8615 if (code == ASHIFTRT && first_code == ASHIFT
8616 && GET_MODE (varop) == shift_mode
8617 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8618 > first_count))
8619 {
8620 count -= first_count;
8621 if (count < 0)
8622 count = - count, code = ASHIFT;
8623 varop = XEXP (varop, 0);
8624 continue;
8625 }
8626
8627 /* There are some cases we can't do. If CODE is ASHIFTRT,
8628 we can only do this if FIRST_CODE is also ASHIFTRT.
8629
8630 We can't do the case when CODE is ROTATE and FIRST_CODE is
8631 ASHIFTRT.
8632
8633 If the mode of this shift is not the mode of the outer shift,
8634 we can't do this if either shift is a right shift or ROTATE.
8635
8636 Finally, we can't do any of these if the mode is too wide
8637 unless the codes are the same.
8638
8639 Handle the case where the shift codes are the same
8640 first. */
8641
8642 if (code == first_code)
8643 {
8644 if (GET_MODE (varop) != result_mode
8645 && (code == ASHIFTRT || code == LSHIFTRT
8646 || code == ROTATE))
8647 break;
8648
8649 count += first_count;
8650 varop = XEXP (varop, 0);
8651 continue;
8652 }
8653
8654 if (code == ASHIFTRT
8655 || (code == ROTATE && first_code == ASHIFTRT)
8656 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8657 || (GET_MODE (varop) != result_mode
8658 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8659 || first_code == ROTATE
8660 || code == ROTATE)))
8661 break;
8662
8663 /* To compute the mask to apply after the shift, shift the
8664 nonzero bits of the inner shift the same way the
8665 outer shift will. */
8666
8667 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8668
8669 mask_rtx
8670 = simplify_binary_operation (code, result_mode, mask_rtx,
8671 GEN_INT (count));
8672
8673 /* Give up if we can't compute an outer operation to use. */
8674 if (mask_rtx == 0
8675 || GET_CODE (mask_rtx) != CONST_INT
8676 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8677 INTVAL (mask_rtx),
8678 result_mode, &complement_p))
8679 break;
8680
8681 /* If the shifts are in the same direction, we add the
8682 counts. Otherwise, we subtract them. */
8683 if ((code == ASHIFTRT || code == LSHIFTRT)
8684 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8685 count += first_count;
8686 else
8687 count -= first_count;
8688
8689 /* If COUNT is positive, the new shift is usually CODE,
8690 except for the two exceptions below, in which case it is
8691 FIRST_CODE. If the count is negative, FIRST_CODE should
8692	 always be used.  */
8693 if (count > 0
8694 && ((first_code == ROTATE && code == ASHIFT)
8695 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8696 code = first_code;
8697 else if (count < 0)
8698 code = first_code, count = - count;
8699
8700 varop = XEXP (varop, 0);
8701 continue;
8702 }
8703
8704 /* If we have (A << B << C) for any shift, we can convert this to
8705 (A << C << B). This wins if A is a constant. Only try this if
8706 B is not a constant. */
8707
8708 else if (GET_CODE (varop) == code
8709 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8710 && 0 != (new
8711 = simplify_binary_operation (code, mode,
8712 XEXP (varop, 0),
8713 GEN_INT (count))))
8714 {
8715 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8716 count = 0;
8717 continue;
8718 }
8719 break;
8720
8721 case NOT:
8722 /* Make this fit the case below. */
8723 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
8724 GEN_INT (GET_MODE_MASK (mode)));
8725 continue;
8726
8727 case IOR:
8728 case AND:
8729 case XOR:
8730 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8731 with C the size of VAROP - 1 and the shift is logical if
8732 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8733 we have an (le X 0) operation. If we have an arithmetic shift
8734 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8735 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8736
8737 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8738 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8739 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8740 && (code == LSHIFTRT || code == ASHIFTRT)
8741 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8742 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8743 {
8744 count = 0;
8745 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8746 const0_rtx);
8747
8748 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8749 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8750
8751 continue;
8752 }
8753
8754 /* If we have (shift (logical)), move the logical to the outside
8755 to allow it to possibly combine with another logical and the
8756 shift to combine with another shift. This also canonicalizes to
8757 what a ZERO_EXTRACT looks like. Also, some machines have
8758 (and (shift)) insns. */
8759
8760 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8761 && (new = simplify_binary_operation (code, result_mode,
8762 XEXP (varop, 1),
8763 GEN_INT (count))) != 0
8764	      && GET_CODE (new) == CONST_INT
8765 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8766 INTVAL (new), result_mode, &complement_p))
8767 {
8768 varop = XEXP (varop, 0);
8769 continue;
8770 }
8771
8772 /* If we can't do that, try to simplify the shift in each arm of the
8773 logical expression, make a new logical expression, and apply
8774 the inverse distributive law. */
8775 {
8776 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8777 XEXP (varop, 0), count);
8778 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8779 XEXP (varop, 1), count);
8780
8781 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8782 varop = apply_distributive_law (varop);
8783
8784 count = 0;
8785 }
8786 break;
8787
8788 case EQ:
8789	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
8790 says that the sign bit can be tested, FOO has mode MODE, C is
8791 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8792 that may be nonzero. */
8793 if (code == LSHIFTRT
8794 && XEXP (varop, 1) == const0_rtx
8795 && GET_MODE (XEXP (varop, 0)) == result_mode
8796 && count == GET_MODE_BITSIZE (result_mode) - 1
8797 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8798 && ((STORE_FLAG_VALUE
8799 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
8800 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8801 && merge_outer_ops (&outer_op, &outer_const, XOR,
8802 (HOST_WIDE_INT) 1, result_mode,
8803 &complement_p))
8804 {
8805 varop = XEXP (varop, 0);
8806 count = 0;
8807 continue;
8808 }
8809 break;
8810
8811 case NEG:
8812 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8813 than the number of bits in the mode is equivalent to A. */
8814 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8815 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
8816 {
8817 varop = XEXP (varop, 0);
8818 count = 0;
8819 continue;
8820 }
8821
8822 /* NEG commutes with ASHIFT since it is multiplication. Move the
8823 NEG outside to allow shifts to combine. */
8824 if (code == ASHIFT
8825 && merge_outer_ops (&outer_op, &outer_const, NEG,
8826 (HOST_WIDE_INT) 0, result_mode,
8827 &complement_p))
8828 {
8829 varop = XEXP (varop, 0);
8830 continue;
8831 }
8832 break;
8833
8834 case PLUS:
8835 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8836 is one less than the number of bits in the mode is
8837 equivalent to (xor A 1). */
8838 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8839 && XEXP (varop, 1) == constm1_rtx
8840 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8841 && merge_outer_ops (&outer_op, &outer_const, XOR,
8842 (HOST_WIDE_INT) 1, result_mode,
8843 &complement_p))
8844 {
8845 count = 0;
8846 varop = XEXP (varop, 0);
8847 continue;
8848 }
8849
8850 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
8851 that might be nonzero in BAR are those being shifted out and those
8852 bits are known zero in FOO, we can replace the PLUS with FOO.
8853 Similarly in the other operand order. This code occurs when
8854 we are computing the size of a variable-size array. */
8855
8856 if ((code == ASHIFTRT || code == LSHIFTRT)
8857 && count < HOST_BITS_PER_WIDE_INT
8858 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8859 && (nonzero_bits (XEXP (varop, 1), result_mode)
8860 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8861 {
8862 varop = XEXP (varop, 0);
8863 continue;
8864 }
8865 else if ((code == ASHIFTRT || code == LSHIFTRT)
8866 && count < HOST_BITS_PER_WIDE_INT
8867 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8868 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8869 >> count)
8870 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8871 & nonzero_bits (XEXP (varop, 1),
8872 result_mode)))
8873 {
8874 varop = XEXP (varop, 1);
8875 continue;
8876 }
8877
8878 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8879 if (code == ASHIFT
8880 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8881 && (new = simplify_binary_operation (ASHIFT, result_mode,
8882 XEXP (varop, 1),
8883 GEN_INT (count))) != 0
8884	      && GET_CODE (new) == CONST_INT
8885 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8886 INTVAL (new), result_mode, &complement_p))
8887 {
8888 varop = XEXP (varop, 0);
8889 continue;
8890 }
8891 break;
8892
8893 case MINUS:
8894	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8895 with C the size of VAROP - 1 and the shift is logical if
8896 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8897 we have a (gt X 0) operation. If the shift is arithmetic with
8898 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8899 we have a (neg (gt X 0)) operation. */
8900
8901	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8902	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8903	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8904	      && (code == LSHIFTRT || code == ASHIFTRT)
8905	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8906	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8907	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8908	    {
8909	      count = 0;
8910	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8911	       const0_rtx);
8912
8913	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8914	varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8915
8916	      continue;
8917	    }
8918	  break;
8219	  if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8220	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8221	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8222	      && (code == LSHIFTRT || code == ASHIFTRT)
8223	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8224	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8225	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8226	    {
8227	      count = 0;
8228	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8229	       const0_rtx);
8230
8231	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8232	varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8233
8234	      continue;
8235	    }
8236	  break;
8919
8920 case TRUNCATE:
8921 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
8922 if the truncate does not affect the value. */
8923 if (code == LSHIFTRT
8924 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
8925 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8926 && (INTVAL (XEXP (XEXP (varop, 0), 1))
8927 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
8928 - GET_MODE_BITSIZE (GET_MODE (varop)))))
8929 {
8930 rtx varop_inner = XEXP (varop, 0);
8931
8932 varop_inner = gen_rtx_combine (LSHIFTRT,
8933 GET_MODE (varop_inner),
8934 XEXP (varop_inner, 0),
8935 GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
8936 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
8937 varop_inner);
8938 count = 0;
8939 continue;
8940 }
8941 break;
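/* For example, (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 3)
   becomes (truncate:SI (lshiftrt:DI X 35)): both forms leave bits
   63..35 of X in positions 28..0, and an inner count of at least 32
   guarantees the truncation discards nothing the combined shift still
   needs.  */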
8942
8943 default:
8944 break;
8237 }
8238
8239 break;
8240 }
8241
8242 /* We need to determine what mode to do the shift in. If the shift is
8243 a right shift or ROTATE, we must always do it in the mode it was
8244 originally done in. Otherwise, we can do it in MODE, the widest mode
8245 encountered. The code we care about is that of the shift that will
8246 actually be done, not the shift that was originally requested. */
8247 shift_mode
8248 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8249 ? result_mode : mode);
8250
8251 /* We have now finished analyzing the shift. The result should be
8252 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8253 OUTER_OP is non-NIL, it is an operation that needs to be applied
8254 to the result of the shift. OUTER_CONST is the relevant constant,
8255 but we must turn off all bits turned off in the shift.
8256
8257 If we were passed a value for X, see if we can use any pieces of
8258	     it.  If not, make a new rtx.  */
8259
8260 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8261 && GET_CODE (XEXP (x, 1)) == CONST_INT
8262 && INTVAL (XEXP (x, 1)) == count)
8263 const_rtx = XEXP (x, 1);
8264 else
8265 const_rtx = GEN_INT (count);
8266
8267 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8268 && GET_MODE (XEXP (x, 0)) == shift_mode
8269 && SUBREG_REG (XEXP (x, 0)) == varop)
8270 varop = XEXP (x, 0);
8271 else if (GET_MODE (varop) != shift_mode)
8272 varop = gen_lowpart_for_combine (shift_mode, varop);
8273
8945 }
8946
8947 break;
8948 }
8949
8950 /* We need to determine what mode to do the shift in. If the shift is
8951 a right shift or ROTATE, we must always do it in the mode it was
8952 originally done in. Otherwise, we can do it in MODE, the widest mode
8953 encountered. The code we care about is that of the shift that will
8954 actually be done, not the shift that was originally requested. */
8955 shift_mode
8956 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8957 ? result_mode : mode);
8958
8959 /* We have now finished analyzing the shift. The result should be
8960 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8961 OUTER_OP is non-NIL, it is an operation that needs to be applied
8962 to the result of the shift. OUTER_CONST is the relevant constant,
8963 but we must turn off all bits turned off in the shift.
8964
8965 If we were passed a value for X, see if we can use any pieces of
8966	     it.  If not, make a new rtx.  */
8967
8968 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8969 && GET_CODE (XEXP (x, 1)) == CONST_INT
8970 && INTVAL (XEXP (x, 1)) == count)
8971 const_rtx = XEXP (x, 1);
8972 else
8973 const_rtx = GEN_INT (count);
8974
8975 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8976 && GET_MODE (XEXP (x, 0)) == shift_mode
8977 && SUBREG_REG (XEXP (x, 0)) == varop)
8978 varop = XEXP (x, 0);
8979 else if (GET_MODE (varop) != shift_mode)
8980 varop = gen_lowpart_for_combine (shift_mode, varop);
8981
8274 /* If we can't make the SUBREG, try to return what we were given. */
8982 /* If we can't make the SUBREG, try to return what we were given. */
8275 if (GET_CODE (varop) == CLOBBER)
8276 return x ? x : varop;
8277
8278 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8279 if (new != 0)
8280 x = new;
8281 else
8282 {
8283 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8284 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8285
8286 SUBST (XEXP (x, 0), varop);
8287 SUBST (XEXP (x, 1), const_rtx);
8288 }
8289
8290 /* If we have an outer operation and we just made a shift, it is
8291 possible that we could have simplified the shift were it not
8292 for the outer operation. So try to do the simplification
8293 recursively. */
8294
8295 if (outer_op != NIL && GET_CODE (x) == code
8296 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8297 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8298 INTVAL (XEXP (x, 1)));
8299
8300 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8301 turn off all the bits that the shift would have turned off. */
8302 if (orig_code == LSHIFTRT && result_mode != shift_mode)
8303 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8304 GET_MODE_MASK (result_mode) >> orig_count);
8305
8306 /* Do the remainder of the processing in RESULT_MODE. */
8307 x = gen_lowpart_for_combine (result_mode, x);
8308
8309 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8310 operation. */
8311 if (complement_p)
8312 x = gen_unary (NOT, result_mode, result_mode, x);
8313
8314 if (outer_op != NIL)
8315 {
8316 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8317 {
8318 int width = GET_MODE_BITSIZE (result_mode);
8319
8320 outer_const &= GET_MODE_MASK (result_mode);
8321
8322 /* If this would be an entire word for the target, but is not for
8323 the host, then sign-extend on the host so that the number will
8324 look the same way on the host that it would on the target.
8325
8326 For example, when building a 64 bit alpha hosted 32 bit sparc
8327 targeted compiler, then we want the 32 bit unsigned value -1 to be
8328 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8329	     The latter confuses the sparc backend.  */
8330
8331 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8332 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8333 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8334 }
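 /* Worked instance (a sketch, assuming BITS_PER_WORD == 32 and
    HOST_BITS_PER_WIDE_INT == 64): WIDTH is 32, so an OUTER_CONST of
    0x00000000ffffffff has bit 31 set and is widened above to
    0xffffffffffffffff, i.e. the host value -1, before being used
    below.  */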
8335
8336 if (outer_op == AND)
8337 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8338 else if (outer_op == SET)
8339 /* This means that we have determined that the result is
8340 equivalent to a constant. This should be rare. */
8341 x = GEN_INT (outer_const);
8342 else if (GET_RTX_CLASS (outer_op) == '1')
8343 x = gen_unary (outer_op, result_mode, result_mode, x);
8344 else
8345 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8346 }
8347
8348 return x;
8349}
8350
8351/* Like recog, but we receive the address of a pointer to a new pattern.
8352 We try to match the rtx that the pointer points to.
8353 If that fails, we may try to modify or replace the pattern,
8354 storing the replacement into the same pointer object.
8355
8356 Modifications include deletion or addition of CLOBBERs.
8357
8358 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8359 the CLOBBERs are placed.
8360
8361 PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8362 we had to add.
8363
8364 The value is the final insn code from the pattern ultimately matched,
8365 or -1. */
8366
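/* Sketch of a typical call (the locals shown are hypothetical names in
   the caller):

     insn_code_number
       = recog_for_combine (&newpat, i3, &new_i3_notes, &n_scratches);

   A negative return means the pattern was rejected; otherwise NEWPAT
   may have been rewritten in place, e.g. by adding CLOBBERs, with the
   matching REG_UNUSED notes left in NEW_I3_NOTES.  */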
8367static int
8368recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
8369 rtx *pnewpat;
8370 rtx insn;
8371 rtx *pnotes;
8372 int *padded_scratches;
8373{
8374 register rtx pat = *pnewpat;
8375 int insn_code_number;
8376 int num_clobbers_to_add = 0;
8377 int i;
8378 rtx notes = 0;
8379
8380 *padded_scratches = 0;
8381
8382 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8383 we use to indicate that something didn't match. If we find such a
8384 thing, force rejection. */
8385 if (GET_CODE (pat) == PARALLEL)
8386 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8387 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8388 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8389 return -1;
8390
8391 /* Is the result of combination a valid instruction? */
8392 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8393
8394 /* If it isn't, there is the possibility that we previously had an insn
8395 that clobbered some register as a side effect, but the combined
8396 insn doesn't need to do that. So try once more without the clobbers
8397 unless this represents an ASM insn. */
8398
8399 if (insn_code_number < 0 && ! check_asm_operands (pat)
8400 && GET_CODE (pat) == PARALLEL)
8401 {
8402 int pos;
8403
8404 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8405 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8406 {
8407 if (i != pos)
8408 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8409 pos++;
8410 }
8411
8412 SUBST_INT (XVECLEN (pat, 0), pos);
8413
8414 if (pos == 1)
8415 pat = XVECEXP (pat, 0, 0);
8416
8417 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8418 }
8419
8420  /* If we had any clobbers to add, make a new pattern that contains
8421 them. Then check to make sure that all of them are dead. */
8422 if (num_clobbers_to_add)
8423 {
8424 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8425 gen_rtvec (GET_CODE (pat) == PARALLEL
8426 ? XVECLEN (pat, 0) + num_clobbers_to_add
8427 : num_clobbers_to_add + 1));
9132 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9133 gen_rtvec (GET_CODE (pat) == PARALLEL
9134 ? XVECLEN (pat, 0) + num_clobbers_to_add
9135 : num_clobbers_to_add + 1));
8428
8429 if (GET_CODE (pat) == PARALLEL)
8430 for (i = 0; i < XVECLEN (pat, 0); i++)
8431 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8432 else
8433 XVECEXP (newpat, 0, 0) = pat;
8434
8435 add_clobbers (newpat, insn_code_number);
8436
8437 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8438 i < XVECLEN (newpat, 0); i++)
8439 {
8440 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8441 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8442 return -1;
8443 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8444 (*padded_scratches)++;
8445 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8446 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9153 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9154 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8447 }
8448 pat = newpat;
8449 }
8450
8451 *pnewpat = pat;
8452 *pnotes = notes;
8453
8454 return insn_code_number;
8455}
8456
8457/* Like gen_lowpart but for use by combine. In combine it is not possible
8458 to create any new pseudoregs. However, it is safe to create
8459 invalid memory addresses, because combine will try to recognize
8460 them and all they will do is make the combine attempt fail.
8461
8462 If for some reason this cannot do its job, an rtx
8463 (clobber (const_int 0)) is returned.
8464 An insn containing that will not be recognized. */
8465
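/* For example (a sketch): with X a pseudo (reg:SI 100), asking for
   QImode yields (subreg:QI (reg:SI 100) 0) on a little-endian target,
   while a volatile MEM makes us return (clobber (const_int 0)) so that
   the combine attempt simply fails.  */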
8466#undef gen_lowpart
8467
8468static rtx
8469gen_lowpart_for_combine (mode, x)
8470 enum machine_mode mode;
8471 register rtx x;
8472{
8473 rtx result;
8474
8475 if (GET_MODE (x) == mode)
8476 return x;
8477
8478 /* We can only support MODE being wider than a word if X is a
8479 constant integer or has a mode the same size. */
8480
8481 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8482 && ! ((GET_MODE (x) == VOIDmode
8483 && (GET_CODE (x) == CONST_INT
8484 || GET_CODE (x) == CONST_DOUBLE))
8485 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8486 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9194 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
8487
8488 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8489 won't know what to do. So we will strip off the SUBREG here and
8490 process normally. */
8491 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8492 {
8493 x = SUBREG_REG (x);
8494 if (GET_MODE (x) == mode)
8495 return x;
8496 }
8497
8498 result = gen_lowpart_common (mode, x);
8499 if (result != 0
8500 && GET_CODE (result) == SUBREG
8501 && GET_CODE (SUBREG_REG (result)) == REG
8502 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8503 && (GET_MODE_SIZE (GET_MODE (result))
8504 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8505 reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
9213 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
8506
8507 if (result)
8508 return result;
8509
8510 if (GET_CODE (x) == MEM)
8511 {
8512 register int offset = 0;
8513 rtx new;
8514
8515 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8516 address. */
8517 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8518 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9226 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
8519
8520 /* If we want to refer to something bigger than the original memref,
8521 generate a perverse subreg instead. That will force a reload
8522 of the original memref X. */
8523 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8524 return gen_rtx (SUBREG, mode, x, 0);
9232 return gen_rtx_SUBREG (mode, x, 0);
8525
8526 if (WORDS_BIG_ENDIAN)
8527 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8528 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8529 if (BYTES_BIG_ENDIAN)
8530 {
8531 /* Adjust the address so that the address-after-the-data is
8532 unchanged. */
8533 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8534 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8535 }
8536 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
9244 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
8537 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8538 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8539 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8540 return new;
8541 }
8542
8543 /* If X is a comparison operator, rewrite it in a new mode. This
8544 probably won't match, but may allow further simplifications. */
8545 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8546 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8547
8548 /* If we couldn't simplify X any other way, just enclose it in a
8549 SUBREG. Normally, this SUBREG won't match, but some patterns may
8550 include an explicit SUBREG or we may simplify it further in combine. */
8551 else
8552 {
8553 int word = 0;
8554
8555 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8556 word = ((GET_MODE_SIZE (GET_MODE (x))
8557 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8558 / UNITS_PER_WORD);
8559 return gen_rtx (SUBREG, mode, x, word);
9267 return gen_rtx_SUBREG (mode, x, word);
8560 }
8561}
8562
8563/* Make an rtx expression. This is a subset of gen_rtx and only supports
8564 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8565
8566 If the identical expression was previously in the insn (in the undobuf),
8567 it will be returned. Only if it is not found will a new expression
8568 be made. */
8569
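/* Sketch of a call: gen_rtx_combine (IOR, SImode, a, b) first scans the
   undo buffer for an (ior:SI a b) recorded while substituting into the
   current insn and returns that rtx when found; only otherwise is a
   fresh one allocated.  */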
8570/*VARARGS2*/
8571static rtx
8572gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
8573{
8574#ifndef __STDC__
8575 enum rtx_code code;
8576 enum machine_mode mode;
8577#endif
8578 va_list p;
8579 int n_args;
8580 rtx args[3];
8581 int i, j;
9289 int j;
8582 char *fmt;
8583 rtx rt;
9290 char *fmt;
9291 rtx rt;
9292 struct undo *undo;
8584
8585 VA_START (p, mode);
8586
8587#ifndef __STDC__
8588 code = va_arg (p, enum rtx_code);
8589 mode = va_arg (p, enum machine_mode);
8590#endif
8591
8592 n_args = GET_RTX_LENGTH (code);
8593 fmt = GET_RTX_FORMAT (code);
8594
8595 if (n_args == 0 || n_args > 3)
8596 abort ();
8597
8598 /* Get each arg and verify that it is supposed to be an expression. */
8599 for (j = 0; j < n_args; j++)
8600 {
8601 if (*fmt++ != 'e')
8602 abort ();
8603
8604 args[j] = va_arg (p, rtx);
8605 }
8606
8607 /* See if this is in undobuf. Be sure we don't use objects that came
8608 from another insn; this could produce circular rtl structures. */
8609
8610 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8611 if (!undobuf.undo[i].is_int
8612 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8613 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
9319 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9320 if (!undo->is_int
9321 && GET_CODE (undo->old_contents.r) == code
9322 && GET_MODE (undo->old_contents.r) == mode)
8614 {
8615 for (j = 0; j < n_args; j++)
8616 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
9325 if (XEXP (undo->old_contents.r, j) != args[j])
8617 break;
8618
8619 if (j == n_args)
8620 return undobuf.undo[i].old_contents.r;
9329 return undo->old_contents.r;
8621 }
8622
8623 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8624 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8625 rt = rtx_alloc (code);
8626 PUT_MODE (rt, mode);
8627 XEXP (rt, 0) = args[0];
8628 if (n_args > 1)
8629 {
8630 XEXP (rt, 1) = args[1];
8631 if (n_args > 2)
8632 XEXP (rt, 2) = args[2];
8633 }
8634 return rt;
8635}
8636
8637/* These routines make binary and unary operations by first seeing if they
8638 fold; if not, a new expression is allocated. */
8639
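/* E.g. (a sketch): gen_binary (PLUS, SImode, GEN_INT (2), GEN_INT (3))
   folds to (const_int 5), while gen_binary (PLUS, SImode, x,
   const1_rtx) becomes a canonicalized (plus:SI x (const_int 1)) with
   the constant second.  */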
8640static rtx
8641gen_binary (code, mode, op0, op1)
8642 enum rtx_code code;
8643 enum machine_mode mode;
8644 rtx op0, op1;
8645{
8646 rtx result;
8647 rtx tem;
8648
8649 if (GET_RTX_CLASS (code) == 'c'
8650 && (GET_CODE (op0) == CONST_INT
8651 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8652 tem = op0, op0 = op1, op1 = tem;
8653
8654 if (GET_RTX_CLASS (code) == '<')
8655 {
8656 enum machine_mode op_mode = GET_MODE (op0);
8657
8658 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8659 just (REL_OP X Y). */
8660 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8661 {
8662 op1 = XEXP (op0, 1);
8663 op0 = XEXP (op0, 0);
8664 op_mode = GET_MODE (op0);
8665 }
8666
8667 if (op_mode == VOIDmode)
8668 op_mode = GET_MODE (op1);
8669 result = simplify_relational_operation (code, op_mode, op0, op1);
8670 }
8671 else
8672 result = simplify_binary_operation (code, mode, op0, op1);
8673
8674 if (result)
8675 return result;
8676
8677 /* Put complex operands first and constants second. */
8678 if (GET_RTX_CLASS (code) == 'c'
8679 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8680 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8681 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8682 || (GET_CODE (op0) == SUBREG
8683 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8684 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8685 return gen_rtx_combine (code, mode, op1, op0);
8686
9396 /* If we are turning off bits already known off in OP0, we need not do
9397 an AND. */
9398 else if (code == AND && GET_CODE (op1) == CONST_INT
9399 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9400 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9401 return op0;
9402
8687 return gen_rtx_combine (code, mode, op0, op1);
8688}
8689
8690static rtx
8691gen_unary (code, mode, op0_mode, op0)
8692 enum rtx_code code;
8693 enum machine_mode mode, op0_mode;
8694 rtx op0;
8695{
8696 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
8697
8698 if (result)
8699 return result;
8700
8701 return gen_rtx_combine (code, mode, op0);
8702}
8703
8704/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8705 comparison code that will be tested.
8706
8707 The result is a possibly different comparison code to use. *POP0 and
8708 *POP1 may be updated.
8709
8710 It is possible that we might detect that a comparison is either always
8711 true or always false. However, we do not perform general constant
8712 folding in combine, so this knowledge isn't useful. Such tautologies
8713 should have been detected earlier. Hence we ignore all such cases. */
8714
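/* E.g. (a sketch): given CODE == LT, *POP0 == X and *POP1 ==
   (const_int 1), the loop below first rewrites the test as
   (LE x (const_int 0)) and then, if the sign bit of X is known to be
   clear, returns EQ against zero.  */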
8715static enum rtx_code
8716simplify_comparison (code, pop0, pop1)
8717 enum rtx_code code;
8718 rtx *pop0;
8719 rtx *pop1;
8720{
8721 rtx op0 = *pop0;
8722 rtx op1 = *pop1;
8723 rtx tem, tem1;
8724 int i;
8725 enum machine_mode mode, tmode;
8726
8727 /* Try a few ways of applying the same transformation to both operands. */
8728 while (1)
8729 {
8730#ifndef WORD_REGISTER_OPERATIONS
8731 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8732 so check specially. */
8733 if (code != GTU && code != GEU && code != LTU && code != LEU
8734 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8735 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8736 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8737 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8738 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8739 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
8740 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
8741 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8742 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8743 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8744 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8745 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8746 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8747 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8748 && (INTVAL (XEXP (op0, 1))
8749 == (GET_MODE_BITSIZE (GET_MODE (op0))
8750 - (GET_MODE_BITSIZE
8751 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8752 {
8753 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8754 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8755 }
8756#endif
8757
8758 /* If both operands are the same constant shift, see if we can ignore the
8759 shift. We can if the shift is a rotate or if the bits shifted out of
8760 this shift are known to be zero for both inputs and if the type of
8761 comparison is compatible with the shift. */
8762 if (GET_CODE (op0) == GET_CODE (op1)
8763 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8764 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8765 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
8766 && (code != GT && code != LT && code != GE && code != LE))
8767 || (GET_CODE (op0) == ASHIFTRT
8768 && (code != GTU && code != LTU
8769		  && code != GEU && code != LEU)))
8770 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8771 && INTVAL (XEXP (op0, 1)) >= 0
8772 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8773 && XEXP (op0, 1) == XEXP (op1, 1))
8774 {
8775 enum machine_mode mode = GET_MODE (op0);
8776 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8777 int shift_count = INTVAL (XEXP (op0, 1));
8778
8779 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8780 mask &= (mask >> shift_count) << shift_count;
8781 else if (GET_CODE (op0) == ASHIFT)
8782 mask = (mask & (mask << shift_count)) >> shift_count;
8783
8784 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8785 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8786 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8787 else
8788 break;
8789 }
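      /* For instance (a sketch): (eq (lshiftrt:SI a (const_int 2))
	 (lshiftrt:SI b (const_int 2))) reduces to (eq a b) when the
	 two low bits of both A and B are known to be zero.  */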
8790
8791 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8792 SUBREGs are of the same mode, and, in both cases, the AND would
8793 be redundant if the comparison was done in the narrower mode,
8794 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8795 and the operand's possibly nonzero bits are 0xffffff01; in that case
8796 if we only care about QImode, we don't need the AND). This case
8797 occurs if the output mode of an scc insn is not SImode and
8798 STORE_FLAG_VALUE == 1 (e.g., the 386).
8799
8800 Similarly, check for a case where the AND's are ZERO_EXTEND
8801 operations from some narrower mode even though a SUBREG is not
8802 present. */
8803
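      /* Concretely (a sketch): if OP0 and OP1 are both of the form
	 (and:SI (subreg:SI (reg:QI ...) 0) (const_int 255)), the masks
	 are redundant in QImode, so we compare the QImode registers
	 directly and make the comparison unsigned.  */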
8804 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8805 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8806 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
8807 {
8808 rtx inner_op0 = XEXP (op0, 0);
8809 rtx inner_op1 = XEXP (op1, 0);
8810 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8811 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8812 int changed = 0;
8813
8814 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8815 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8816 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8817 && (GET_MODE (SUBREG_REG (inner_op0))
8818 == GET_MODE (SUBREG_REG (inner_op1)))
8819 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9535 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
8820 <= HOST_BITS_PER_WIDE_INT)
8821 && (0 == (~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8822 GET_MODE (SUBREG_REG (op0))))
8823 && (0 == (~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8824 GET_MODE (SUBREG_REG (inner_op1)))))
9537 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9538 GET_MODE (SUBREG_REG (inner_op0)))))
9539 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9540 GET_MODE (SUBREG_REG (inner_op1))))))
8825 {
8826 op0 = SUBREG_REG (inner_op0);
8827 op1 = SUBREG_REG (inner_op1);
8828
8829 /* The resulting comparison is always unsigned since we masked
8830 off the original sign bit. */
8831 code = unsigned_condition (code);
8832
8833 changed = 1;
8834 }
8835
8836 else if (c0 == c1)
8837 for (tmode = GET_CLASS_NARROWEST_MODE
8838 (GET_MODE_CLASS (GET_MODE (op0)));
8839 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8840 if (c0 == GET_MODE_MASK (tmode))
8841 {
8842 op0 = gen_lowpart_for_combine (tmode, inner_op0);
8843 op1 = gen_lowpart_for_combine (tmode, inner_op1);
8844 code = unsigned_condition (code);
8845 changed = 1;
8846 break;
8847 }
8848
8849 if (! changed)
8850 break;
8851 }
8852
8853 /* If both operands are NOT, we can strip off the outer operation
8854 and adjust the comparison code for swapped operands; similarly for
8855 NEG, except that this must be an equality comparison. */
8856 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8857 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8858 && (code == EQ || code == NE)))
8859 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
8860
8861 else
8862 break;
8863 }
8864
8865 /* If the first operand is a constant, swap the operands and adjust the
8866 comparison code appropriately. */
8867 if (CONSTANT_P (op0))
9582 comparison code appropriately, but don't do this if the second operand
9583 is already a constant integer. */
9584 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8868 {
8869 tem = op0, op0 = op1, op1 = tem;
8870 code = swap_condition (code);
8871 }
8872
8873 /* We now enter a loop during which we will try to simplify the comparison.
8874 For the most part, we only are concerned with comparisons with zero,
8875 but some things may really be comparisons with zero but not start
8876 out looking that way. */
8877
8878 while (GET_CODE (op1) == CONST_INT)
8879 {
8880 enum machine_mode mode = GET_MODE (op0);
8881 int mode_width = GET_MODE_BITSIZE (mode);
8882 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8883 int equality_comparison_p;
8884 int sign_bit_comparison_p;
8885 int unsigned_comparison_p;
8886 HOST_WIDE_INT const_op;
8887
8888 /* We only want to handle integral modes. This catches VOIDmode,
8889 CCmode, and the floating-point modes. An exception is that we
8890 can handle VOIDmode if OP0 is a COMPARE or a comparison
8891 operation. */
8892
8893 if (GET_MODE_CLASS (mode) != MODE_INT
8894 && ! (mode == VOIDmode
8895 && (GET_CODE (op0) == COMPARE
8896 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8897 break;
8898
8899 /* Get the constant we are comparing against and turn off all bits
8900 not on in our mode. */
8901 const_op = INTVAL (op1);
8902 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8903 const_op &= mask;
8904
8905 /* If we are comparing against a constant power of two and the value
8906 being compared can only have that single bit nonzero (e.g., it was
8907 `and'ed with that bit), we can replace this with a comparison
8908 with zero. */
8909 if (const_op
8910 && (code == EQ || code == NE || code == GE || code == GEU
8911 || code == LT || code == LTU)
8912 && mode_width <= HOST_BITS_PER_WIDE_INT
8913 && exact_log2 (const_op) >= 0
8914 && nonzero_bits (op0, mode) == const_op)
8915 {
8916 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8917 op1 = const0_rtx, const_op = 0;
8918 }
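      /* E.g. (a sketch): if OP0 is (and:SI x (const_int 4)), its only
	 possibly nonzero bit is bit 2, so (eq op0 (const_int 4))
	 becomes (ne op0 (const_int 0)).  */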
8919
8920 /* Similarly, if we are comparing a value known to be either -1 or
8921 0 with -1, change it to the opposite comparison against zero. */
8922
8923 if (const_op == -1
8924 && (code == EQ || code == NE || code == GT || code == LE
8925 || code == GEU || code == LTU)
8926 && num_sign_bit_copies (op0, mode) == mode_width)
8927 {
8928 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8929 op1 = const0_rtx, const_op = 0;
8930 }
8931
8932 /* Do some canonicalizations based on the comparison code. We prefer
8933 comparisons against zero and then prefer equality comparisons.
8934 If we can reduce the size of a constant, we will do that too. */
8935
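      /* E.g. (a sketch): (LT x (const_int 5)) becomes
	 (LE x (const_int 4)), and (GTU x (const_int 0)) becomes
	 (NE x (const_int 0)).  */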
8936 switch (code)
8937 {
8938 case LT:
8939 /* < C is equivalent to <= (C - 1) */
8940 if (const_op > 0)
8941 {
8942 const_op -= 1;
8943 op1 = GEN_INT (const_op);
8944 code = LE;
8945 /* ... fall through to LE case below. */
8946 }
8947 else
8948 break;
8949
8950 case LE:
8951 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8952 if (const_op < 0)
8953 {
8954 const_op += 1;
8955 op1 = GEN_INT (const_op);
8956 code = LT;
8957 }
8958
8959 /* If we are doing a <= 0 comparison on a value known to have
8960 a zero sign bit, we can replace this with == 0. */
8961 else if (const_op == 0
8962 && mode_width <= HOST_BITS_PER_WIDE_INT
8963 && (nonzero_bits (op0, mode)
8964 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8965 code = EQ;
8966 break;
8967
8968 case GE:
8969 /* >= C is equivalent to > (C - 1). */
8970 if (const_op > 0)
8971 {
8972 const_op -= 1;
8973 op1 = GEN_INT (const_op);
8974 code = GT;
8975 /* ... fall through to GT below. */
8976 }
8977 else
8978 break;
8979
8980 case GT:
8981	  /* > C is equivalent to >= (C + 1); we do this for C < 0 */
8982 if (const_op < 0)
8983 {
8984 const_op += 1;
8985 op1 = GEN_INT (const_op);
8986 code = GE;
8987 }
8988
8989 /* If we are doing a > 0 comparison on a value known to have
8990 a zero sign bit, we can replace this with != 0. */
8991 else if (const_op == 0
8992 && mode_width <= HOST_BITS_PER_WIDE_INT
8993 && (nonzero_bits (op0, mode)
8994 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8995 code = NE;
8996 break;
8997
8998 case LTU:
8999 /* < C is equivalent to <= (C - 1). */
9000 if (const_op > 0)
9001 {
9002 const_op -= 1;
9003 op1 = GEN_INT (const_op);
9004 code = LEU;
9005 /* ... fall through ... */
9006 }
9007
9008 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
9009 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9726 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9727 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9010 {
9011 const_op = 0, op1 = const0_rtx;
9012 code = GE;
9013 break;
9014 }
9015 else
9016 break;
9017
9018 case LEU:
9019 /* unsigned <= 0 is equivalent to == 0 */
9020 if (const_op == 0)
9021 code = EQ;
9022
9023 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9024 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9741 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
9742 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9743 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9025 {
9026 const_op = 0, op1 = const0_rtx;
9027 code = GE;
9028 }
9029 break;
9030
9031 case GEU:
9032	  /* >= C is equivalent to > (C - 1). */
9033 if (const_op > 1)
9034 {
9035 const_op -= 1;
9036 op1 = GEN_INT (const_op);
9037 code = GTU;
9038 /* ... fall through ... */
9039 }
9040
9041 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
9042 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
9761 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9762 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9043 {
9044 const_op = 0, op1 = const0_rtx;
9045 code = LT;
9046 break;
9047 }
9048 else
9049 break;
9050
9051 case GTU:
9052 /* unsigned > 0 is equivalent to != 0 */
9053 if (const_op == 0)
9054 code = NE;
9055
9056 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
9057 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
9777 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9778 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
9058 {
9059 const_op = 0, op1 = const0_rtx;
9060 code = LT;
9061 }
9062 break;
9784
9785 default:
9786 break;
9063 }
9064
9065 /* Compute some predicates to simplify code below. */
9066
9067 equality_comparison_p = (code == EQ || code == NE);
9068 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9069 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9070			       || code == GEU);
9071
9072 /* If this is a sign bit comparison and we can do arithmetic in
9073 MODE, say that we will only be needing the sign bit of OP0. */
9074 if (sign_bit_comparison_p
9075 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9076 op0 = force_to_mode (op0, mode,
9077 ((HOST_WIDE_INT) 1
9078 << (GET_MODE_BITSIZE (mode) - 1)),
9079 NULL_RTX, 0);
9080
9081 /* Now try cases based on the opcode of OP0. If none of the cases
9082 does a "continue", we exit this loop immediately after the
9083 switch. */
9084
9085 switch (GET_CODE (op0))
9086 {
9087 case ZERO_EXTRACT:
9088 /* If we are extracting a single bit from a variable position in
9089 a constant that has only a single bit set and are comparing it
9090 with zero, we can convert this into an equality comparison
9091 between the position and the location of the single bit. We can't
9092 do this if bit endian and we don't have an extzv since we then
9093 can't know what mode to use for the endianness adjustment. */
9815 between the position and the location of the single bit. */
9094
9095 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9096 && XEXP (op0, 1) == const1_rtx
9097 && equality_comparison_p && const_op == 0
9098 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0
9099 && (! BITS_BIG_ENDIAN
9100#ifdef HAVE_extzv
9101 || HAVE_extzv
9102#endif
9103 ))
9820 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
9104 {
9105#ifdef HAVE_extzv
9106 if (BITS_BIG_ENDIAN)
9822 if (BITS_BIG_ENDIAN)
9823#ifdef HAVE_extzv
9107 i = (GET_MODE_BITSIZE
9108 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9824 i = (GET_MODE_BITSIZE
9825 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
9826#else
9827 i = BITS_PER_WORD - 1 - i;
9109#endif
9110
9111 op0 = XEXP (op0, 2);
9112 op1 = GEN_INT (i);
9113 const_op = i;
9114
9115 /* Result is nonzero iff shift count is equal to I. */
9116 code = reverse_condition (code);
9117 continue;
9118 }
9119
9120 /* ... fall through ... */
9121
9122 case SIGN_EXTRACT:
9123 tem = expand_compound_operation (op0);
9124 if (tem != op0)
9125 {
9126 op0 = tem;
9127 continue;
9128 }
9129 break;
9130
9131 case NOT:
9132 /* If testing for equality, we can take the NOT of the constant. */
9133 if (equality_comparison_p
9134 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9135 {
9136 op0 = XEXP (op0, 0);
9137 op1 = tem;
9138 continue;
9139 }
9140
9141 /* If just looking at the sign bit, reverse the sense of the
9142 comparison. */
9143 if (sign_bit_comparison_p)
9144 {
9145 op0 = XEXP (op0, 0);
9146 code = (code == GE ? LT : GE);
9147 continue;
9148 }
9149 break;
9150
9151 case NEG:
9152 /* If testing for equality, we can take the NEG of the constant. */
9153 if (equality_comparison_p
9154 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9155 {
9156 op0 = XEXP (op0, 0);
9157 op1 = tem;
9158 continue;
9159 }
9160
9161 /* The remaining cases only apply to comparisons with zero. */
9162 if (const_op != 0)
9163 break;
9164
9165 /* When X is ABS or is known positive,
9166 (neg X) is < 0 if and only if X != 0. */
9167
9168 if (sign_bit_comparison_p
9169 && (GET_CODE (XEXP (op0, 0)) == ABS
9170 || (mode_width <= HOST_BITS_PER_WIDE_INT
9171 && (nonzero_bits (XEXP (op0, 0), mode)
9172 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9173 {
9174 op0 = XEXP (op0, 0);
9175 code = (code == LT ? NE : EQ);
9176 continue;
9177 }
9178
9179 /* If we have NEG of something whose two high-order bits are the
9840
9841 case SIGN_EXTRACT:
9842 tem = expand_compound_operation (op0);
9843 if (tem != op0)
9844 {
9845 op0 = tem;
9846 continue;
9847 }
9848 break;
9849
9850 case NOT:
9851 /* If testing for equality, we can take the NOT of the constant. */
9852 if (equality_comparison_p
9853 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9854 {
9855 op0 = XEXP (op0, 0);
9856 op1 = tem;
9857 continue;
9858 }
9859
9860 /* If just looking at the sign bit, reverse the sense of the
9861 comparison. */
9862 if (sign_bit_comparison_p)
9863 {
9864 op0 = XEXP (op0, 0);
9865 code = (code == GE ? LT : GE);
9866 continue;
9867 }
9868 break;
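 /* Worked examples for the NOT rules (hypothetical operands): for the
    equality branch, (eq (not X) (const_int 5)) becomes
    (eq X (const_int -6)), since ~X == 5 exactly when X == ~5.  For the
    sign-bit branch, (not X) is negative exactly when X is not, so
    (lt (not X) (const_int 0)) becomes (ge X (const_int 0)).  */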
9869
9870 case NEG:
9871 /* If testing for equality, we can take the NEG of the constant. */
9872 if (equality_comparison_p
9873 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9874 {
9875 op0 = XEXP (op0, 0);
9876 op1 = tem;
9877 continue;
9878 }
9879
9880 /* The remaining cases only apply to comparisons with zero. */
9881 if (const_op != 0)
9882 break;
9883
9884 /* When X is ABS or is known positive,
9885 (neg X) is < 0 if and only if X != 0. */
9886
9887 if (sign_bit_comparison_p
9888 && (GET_CODE (XEXP (op0, 0)) == ABS
9889 || (mode_width <= HOST_BITS_PER_WIDE_INT
9890 && (nonzero_bits (XEXP (op0, 0), mode)
9891 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
9892 {
9893 op0 = XEXP (op0, 0);
9894 code = (code == LT ? NE : EQ);
9895 continue;
9896 }
9897
9898 /* If we have NEG of something whose two high-order bits are the
9180 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9899 same, we know that "(-a) < 0" is equivalent to "a > 0". */
9181 if (num_sign_bit_copies (op0, mode) >= 2)
9182 {
9183 op0 = XEXP (op0, 0);
9184 code = swap_condition (code);
9185 continue;
9186 }
9187 break;
9188
9189 case ROTATE:
9190 /* If we are testing equality and our count is a constant, we
9191 can perform the inverse operation on our RHS. */
9192 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9193 && (tem = simplify_binary_operation (ROTATERT, mode,
9194 op1, XEXP (op0, 1))) != 0)
9195 {
9196 op0 = XEXP (op0, 0);
9197 op1 = tem;
9198 continue;
9199 }
9200
9201 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9202 a particular bit. Convert it to an AND of a constant of that
9203 bit. This will be converted into a ZERO_EXTRACT. */
9204 if (const_op == 0 && sign_bit_comparison_p
9205 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9206 && mode_width <= HOST_BITS_PER_WIDE_INT)
9207 {
9208 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9209 ((HOST_WIDE_INT) 1
9210 << (mode_width - 1
9211 - INTVAL (XEXP (op0, 1)))));
9212 code = (code == LT ? NE : EQ);
9213 continue;
9214 }
9215
9900 if (num_sign_bit_copies (op0, mode) >= 2)
9901 {
9902 op0 = XEXP (op0, 0);
9903 code = swap_condition (code);
9904 continue;
9905 }
9906 break;
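 /* Worked examples for the NEG rules (hypothetical operands): if X is
    known nonnegative, (lt (neg X) (const_int 0)) holds exactly when
    X != 0, giving (ne X (const_int 0)).  With at least two sign-bit
    copies the negation cannot overflow, so e.g.
    (lt (neg A) (const_int 0)) becomes (gt A (const_int 0)) via
    swap_condition.  */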
9907
9908 case ROTATE:
9909 /* If we are testing equality and our count is a constant, we
9910 can perform the inverse operation on our RHS. */
9911 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9912 && (tem = simplify_binary_operation (ROTATERT, mode,
9913 op1, XEXP (op0, 1))) != 0)
9914 {
9915 op0 = XEXP (op0, 0);
9916 op1 = tem;
9917 continue;
9918 }
9919
9920 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9921 a particular bit. Convert it to an AND of a constant of that
9922 bit. This will be converted into a ZERO_EXTRACT. */
9923 if (const_op == 0 && sign_bit_comparison_p
9924 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9925 && mode_width <= HOST_BITS_PER_WIDE_INT)
9926 {
9927 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9928 ((HOST_WIDE_INT) 1
9929 << (mode_width - 1
9930 - INTVAL (XEXP (op0, 1)))));
9931 code = (code == LT ? NE : EQ);
9932 continue;
9933 }
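 /* Worked example for the ROTATE sign-bit rule (hypothetical operands,
    32-bit mode): (lt (rotate X (const_int 3)) (const_int 0)) tests the
    bit of X that the rotate moves into the sign position, bit
    31 - 3 = 28, so it becomes
    (ne (and X (const_int 0x10000000)) (const_int 0)).  */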
9934
9216 /* ... fall through ... */
9935 /* ... fall through ... */
9217
9218 case ABS:
9219 /* ABS is ignorable inside an equality comparison with zero. */
9220 if (const_op == 0 && equality_comparison_p)
9221 {
9222 op0 = XEXP (op0, 0);
9223 continue;
9224 }
9225 break;
9226
9227
9228 case SIGN_EXTEND:
9229 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9230 to (compare FOO CONST) if CONST fits in FOO's mode and we
9231 are either testing inequality or have an unsigned comparison
9232 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9233 if (! unsigned_comparison_p
9234 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9235 <= HOST_BITS_PER_WIDE_INT)
9236 && ((unsigned HOST_WIDE_INT) const_op
9237 < (((HOST_WIDE_INT) 1
9238 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9239 {
9240 op0 = XEXP (op0, 0);
9241 continue;
9242 }
9243 break;
9244
9245 case SUBREG:
9246 /* Check for the case where we are comparing A - C1 with C2,
9247 both constants are smaller than 1/2 the maximum positive
9248 value in MODE, and the comparison is equality or unsigned.
9249 In that case, if A is either zero-extended to MODE or has
9250 sufficient sign bits so that the high-order bit in MODE
9251 is a copy of the sign in the inner mode, we can prove that it is
9252 safe to do the operation in the wider mode. This simplifies
9253 many range checks. */
9254
9255 if (mode_width <= HOST_BITS_PER_WIDE_INT
9256 && subreg_lowpart_p (op0)
9257 && GET_CODE (SUBREG_REG (op0)) == PLUS
9258 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9259 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9260 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9261 < GET_MODE_MASK (mode) / 2)
9262 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9263 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9264 GET_MODE (SUBREG_REG (op0)))
9265 & ~ GET_MODE_MASK (mode))
9266 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9267 GET_MODE (SUBREG_REG (op0)))
9268 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9269 - GET_MODE_BITSIZE (mode)))))
9270 {
9271 op0 = SUBREG_REG (op0);
9272 continue;
9273 }
9274
9275 /* If the inner mode is narrower and we are extracting the low part,
9276 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9277 if (subreg_lowpart_p (op0)
9278 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9279 /* Fall through */ ;
9280 else
9281 break;
9282
9936
9937 case ABS:
9938 /* ABS is ignorable inside an equality comparison with zero. */
9939 if (const_op == 0 && equality_comparison_p)
9940 {
9941 op0 = XEXP (op0, 0);
9942 continue;
9943 }
9944 break;
9945
9946
9947 case SIGN_EXTEND:
9948 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9949 to (compare FOO CONST) if CONST fits in FOO's mode and we
9950 are either testing inequality or have an unsigned comparison
9951 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9952 if (! unsigned_comparison_p
9953 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9954 <= HOST_BITS_PER_WIDE_INT)
9955 && ((unsigned HOST_WIDE_INT) const_op
9956 < (((HOST_WIDE_INT) 1
9957 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9958 {
9959 op0 = XEXP (op0, 0);
9960 continue;
9961 }
9962 break;
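 /* Worked example (hypothetical operands): in
    (lt (sign_extend:SI X:QI) (const_int 5)) the constant is below
    1 << 7, so it is representable in QImode and the comparison can be
    done directly as (lt X:QI (const_int 5)).  */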
9963
9964 case SUBREG:
9965 /* Check for the case where we are comparing A - C1 with C2,
9966 both constants are smaller than 1/2 the maximum positive
9967 value in MODE, and the comparison is equality or unsigned.
9968 In that case, if A is either zero-extended to MODE or has
9969 sufficient sign bits so that the high-order bit in MODE
9970 is a copy of the sign in the inner mode, we can prove that it is
9971 safe to do the operation in the wider mode. This simplifies
9972 many range checks. */
9973
9974 if (mode_width <= HOST_BITS_PER_WIDE_INT
9975 && subreg_lowpart_p (op0)
9976 && GET_CODE (SUBREG_REG (op0)) == PLUS
9977 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9978 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9979 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9980 < GET_MODE_MASK (mode) / 2)
9981 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9982 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9983 GET_MODE (SUBREG_REG (op0)))
9984 & ~ GET_MODE_MASK (mode))
9985 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9986 GET_MODE (SUBREG_REG (op0)))
9987 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9988 - GET_MODE_BITSIZE (mode)))))
9989 {
9990 op0 = SUBREG_REG (op0);
9991 continue;
9992 }
9993
9994 /* If the inner mode is narrower and we are extracting the low part,
9995 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9996 if (subreg_lowpart_p (op0)
9997 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9998 /* Fall through */ ;
9999 else
10000 break;
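 /* Worked example for the range-check rule above (hypothetical
    operands): comparing (subreg:QI (plus:SI A (const_int -10)) 0)
    against (const_int 20) with LTU, where the upper bits of A are
    known zero, can be done as the SImode comparison
    (ltu (plus:SI A (const_int -10)) (const_int 20)), which holds
    exactly for 10 <= A <= 29.  */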
10001
9283 /* ... fall through ... */
10002 /* ... fall through ... */
9284
9285 case ZERO_EXTEND:
9286 if ((unsigned_comparison_p || equality_comparison_p)
9287 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9288 <= HOST_BITS_PER_WIDE_INT)
9289 && ((unsigned HOST_WIDE_INT) const_op
9290 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9291 {
9292 op0 = XEXP (op0, 0);
9293 continue;
9294 }
9295 break;
9296
9297 case PLUS:
9298 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
9299 this for equality comparisons due to pathological cases involving
9300 overflows. */
9301 if (equality_comparison_p
9302 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9303 op1, XEXP (op0, 1))))
9304 {
9305 op0 = XEXP (op0, 0);
9306 op1 = tem;
9307 continue;
9308 }
9309
9310 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9311 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9312 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9313 {
9314 op0 = XEXP (XEXP (op0, 0), 0);
9315 code = (code == LT ? EQ : NE);
9316 continue;
9317 }
9318 break;
9319
9320 case MINUS:
9321 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9322 (eq B (minus A C)), whichever simplifies. We can only do
9323 this for equality comparisons due to pathological cases involving
9324 overflows. */
9325 if (equality_comparison_p
9326 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9327 XEXP (op0, 1), op1)))
9328 {
9329 op0 = XEXP (op0, 0);
9330 op1 = tem;
9331 continue;
9332 }
9333
9334 if (equality_comparison_p
9335 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9336 XEXP (op0, 0), op1)))
9337 {
9338 op0 = XEXP (op0, 1);
9339 op1 = tem;
9340 continue;
9341 }
9342
9343 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9344 of bits in X minus 1, is one iff X > 0. */
9345 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9346 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9347 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9348 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9349 {
9350 op0 = XEXP (op0, 1);
9351 code = (code == GE ? LE : GT);
9352 continue;
9353 }
9354 break;
9355
9356 case XOR:
9357 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9358 if C is zero or B is a constant. */
9359 if (equality_comparison_p
9360 && 0 != (tem = simplify_binary_operation (XOR, mode,
9361 XEXP (op0, 1), op1)))
9362 {
9363 op0 = XEXP (op0, 0);
9364 op1 = tem;
9365 continue;
9366 }
9367 break;
9368
9369 case EQ: case NE:
9370 case LT: case LTU: case LE: case LEU:
9371 case GT: case GTU: case GE: case GEU:
9372 /* We can't do anything if OP0 is a condition code value, rather
9373 than an actual data value. */
9374 if (const_op != 0
9375#ifdef HAVE_cc0
9376 || XEXP (op0, 0) == cc0_rtx
9377#endif
9378 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9379 break;
9380
9381 /* Get the two operands being compared. */
9382 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9383 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9384 else
9385 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9386
9387 /* Check for the cases where we simply want the result of the
9388 earlier test or the opposite of that result. */
9389 if (code == NE
9390 || (code == EQ && reversible_comparison_p (op0))
9391 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9392 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9393 && (STORE_FLAG_VALUE
9394 & (((HOST_WIDE_INT) 1
9395 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9396 && (code == LT
9397 || (code == GE && reversible_comparison_p (op0)))))
9398 {
9399 code = (code == LT || code == NE
9400 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9401 op0 = tem, op1 = tem1;
9402 continue;
9403 }
9404 break;
9405
9406 case IOR:
9407 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9408 iff X <= 0. */
9409 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9410 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9411 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9412 {
9413 op0 = XEXP (op0, 1);
9414 code = (code == GE ? GT : LE);
9415 continue;
9416 }
9417 break;
9418
9419 case AND:
9420 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9421 will be converted to a ZERO_EXTRACT later. */
9422 if (const_op == 0 && equality_comparison_p
9423 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9424 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9425 {
9426 op0 = simplify_and_const_int
9427 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9428 XEXP (op0, 1),
9429 XEXP (XEXP (op0, 0), 1)),
9430 (HOST_WIDE_INT) 1);
9431 continue;
9432 }
9433
9434 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9435 zero and X is a comparison and C1 and C2 describe only bits set
9436 in STORE_FLAG_VALUE, we can compare with X. */
9437 if (const_op == 0 && equality_comparison_p
9438 && mode_width <= HOST_BITS_PER_WIDE_INT
9439 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9440 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9441 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9442 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
9443 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9444 {
9445 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9446 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9447 if ((~ STORE_FLAG_VALUE & mask) == 0
9448 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9449 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9450 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9451 {
9452 op0 = XEXP (XEXP (op0, 0), 0);
9453 continue;
9454 }
9455 }
9456
9457 /* If we are doing an equality comparison of an AND of a bit equal
9458 to the sign bit, replace this with a LT or GE comparison of
9459 the underlying value. */
9460 if (equality_comparison_p
9461 && const_op == 0
9462 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9463 && mode_width <= HOST_BITS_PER_WIDE_INT
9464 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9465 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9466 {
9467 op0 = XEXP (op0, 0);
9468 code = (code == EQ ? GE : LT);
9469 continue;
9470 }
9471
9472 /* If this AND operation is really a ZERO_EXTEND from a narrower
9473 mode, the constant fits within that mode, and this is either an
9474 equality or unsigned comparison, try to do this comparison in
9475 the narrower mode. */
9476 if ((equality_comparison_p || unsigned_comparison_p)
9477 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9478 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9479 & GET_MODE_MASK (mode))
9480 + 1)) >= 0
9481 && const_op >> i == 0
9482 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9483 {
9484 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9485 continue;
9486 }
10003
10004 case ZERO_EXTEND:
10005 if ((unsigned_comparison_p || equality_comparison_p)
10006 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10007 <= HOST_BITS_PER_WIDE_INT)
10008 && ((unsigned HOST_WIDE_INT) const_op
10009 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10010 {
10011 op0 = XEXP (op0, 0);
10012 continue;
10013 }
10014 break;
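 /* Worked example (hypothetical operands):
    (ltu (zero_extend:SI X:QI) (const_int 100)) can be narrowed to
    (ltu X:QI (const_int 100)) because 100 is below the QImode mask 255,
    so no value of X is changed by the extension.  */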
10015
10016 case PLUS:
10017 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10018 this for equality comparisons due to pathological cases involving
10019 overflows. */
10020 if (equality_comparison_p
10021 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10022 op1, XEXP (op0, 1))))
10023 {
10024 op0 = XEXP (op0, 0);
10025 op1 = tem;
10026 continue;
10027 }
10028
10029 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10030 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10031 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10032 {
10033 op0 = XEXP (XEXP (op0, 0), 0);
10034 code = (code == LT ? EQ : NE);
10035 continue;
10036 }
10037 break;
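 /* Worked examples for the PLUS rules (hypothetical operands):
    (eq (plus X (const_int 4)) (const_int 10)) becomes
    (eq X (const_int 6)); wraparound preserves equality, which is why
    ordered comparisons are excluded.  And since (abs X) - 1 is negative
    exactly when (abs X) is zero,
    (lt (plus (abs X) (const_int -1)) (const_int 0)) becomes
    (eq X (const_int 0)).  */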
10038
10039 case MINUS:
10040 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10041 (eq B (minus A C)), whichever simplifies. We can only do
10042 this for equality comparisons due to pathological cases involving
10043 overflows. */
10044 if (equality_comparison_p
10045 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10046 XEXP (op0, 1), op1)))
10047 {
10048 op0 = XEXP (op0, 0);
10049 op1 = tem;
10050 continue;
10051 }
10052
10053 if (equality_comparison_p
10054 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10055 XEXP (op0, 0), op1)))
10056 {
10057 op0 = XEXP (op0, 1);
10058 op1 = tem;
10059 continue;
10060 }
10061
10062 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10063 of bits in X minus 1, is one iff X > 0. */
10064 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10065 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10066 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10067 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10068 {
10069 op0 = XEXP (op0, 1);
10070 code = (code == GE ? LE : GT);
10071 continue;
10072 }
10073 break;
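 /* Worked check of the last MINUS rule (hypothetical operands, with
    C = mode_width - 1): (ashiftrt X C) is 0 when X >= 0 and -1 when
    X < 0, so the difference is -X (negative exactly when X > 0) or
    ~X (always nonnegative).  The sign bit is therefore 1 iff X > 0,
    and GE/LT on the MINUS become LE/GT on X.  */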
10074
10075 case XOR:
10076 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10077 if C is zero or B is a constant. */
10078 if (equality_comparison_p
10079 && 0 != (tem = simplify_binary_operation (XOR, mode,
10080 XEXP (op0, 1), op1)))
10081 {
10082 op0 = XEXP (op0, 0);
10083 op1 = tem;
10084 continue;
10085 }
10086 break;
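 /* Worked example for the XOR rule (hypothetical operands): XOR with a
    constant is its own inverse, so
    (eq (xor X (const_int 5)) (const_int 3)) becomes
    (eq X (const_int 6)), since 3 ^ 5 == 6.  */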
10087
10088 case EQ: case NE:
10089 case LT: case LTU: case LE: case LEU:
10090 case GT: case GTU: case GE: case GEU:
10091 /* We can't do anything if OP0 is a condition code value, rather
10092 than an actual data value. */
10093 if (const_op != 0
10094#ifdef HAVE_cc0
10095 || XEXP (op0, 0) == cc0_rtx
10096#endif
10097 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10098 break;
10099
10100 /* Get the two operands being compared. */
10101 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10102 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10103 else
10104 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10105
10106 /* Check for the cases where we simply want the result of the
10107 earlier test or the opposite of that result. */
10108 if (code == NE
10109 || (code == EQ && reversible_comparison_p (op0))
10110 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10111 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10112 && (STORE_FLAG_VALUE
10113 & (((HOST_WIDE_INT) 1
10114 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10115 && (code == LT
10116 || (code == GE && reversible_comparison_p (op0)))))
10117 {
10118 code = (code == LT || code == NE
10119 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10120 op0 = tem, op1 = tem1;
10121 continue;
10122 }
10123 break;
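 /* Worked example (hypothetical operands): (ne (lt A B) (const_int 0))
    is simply (lt A B), and (eq (lt A B) (const_int 0)) becomes
    (ge A B) when the inner comparison is reversible -- e.g. not an
    IEEE float compare, where a NaN makes LT and GE both false.  */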
10124
10125 case IOR:
10126 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10127 iff X <= 0. */
10128 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10129 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10130 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10131 {
10132 op0 = XEXP (op0, 1);
10133 code = (code == GE ? GT : LE);
10134 continue;
10135 }
10136 break;
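 /* Worked check of the IOR rule (hypothetical operands): for X == 0,
    X - 1 is -1, so the IOR is negative; for X < 0 the sign bit of X
    alone suffices; for X > 0 both X and X - 1 are nonnegative.  So the
    sign bit is set exactly when X <= 0, and GE/LT against zero become
    GT/LE on X.  */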
10137
10138 case AND:
10139 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10140 will be converted to a ZERO_EXTRACT later. */
10141 if (const_op == 0 && equality_comparison_p
10142 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10143 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10144 {
10145 op0 = simplify_and_const_int
10146 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10147 XEXP (op0, 1),
10148 XEXP (XEXP (op0, 0), 1)),
10149 (HOST_WIDE_INT) 1);
10150 continue;
10151 }
10152
10153 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10154 zero and X is a comparison and C1 and C2 describe only bits set
10155 in STORE_FLAG_VALUE, we can compare with X. */
10156 if (const_op == 0 && equality_comparison_p
10157 && mode_width <= HOST_BITS_PER_WIDE_INT
10158 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10159 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10160 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10161 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10162 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10163 {
10164 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10165 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10166 if ((~ STORE_FLAG_VALUE & mask) == 0
10167 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10168 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10169 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10170 {
10171 op0 = XEXP (XEXP (op0, 0), 0);
10172 continue;
10173 }
10174 }
10175
10176 /* If we are doing an equality comparison of an AND of a bit equal
10177 to the sign bit, replace this with a LT or GE comparison of
10178 the underlying value. */
10179 if (equality_comparison_p
10180 && const_op == 0
10181 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10182 && mode_width <= HOST_BITS_PER_WIDE_INT
10183 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10184 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10185 {
10186 op0 = XEXP (op0, 0);
10187 code = (code == EQ ? GE : LT);
10188 continue;
10189 }
10190
10191 /* If this AND operation is really a ZERO_EXTEND from a narrower
10192 mode, the constant fits within that mode, and this is either an
10193 equality or unsigned comparison, try to do this comparison in
10194 the narrower mode. */
10195 if ((equality_comparison_p || unsigned_comparison_p)
10196 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10197 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10198 & GET_MODE_MASK (mode))
10199 + 1)) >= 0
10200 && const_op >> i == 0
10201 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10202 {
10203 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10204 continue;
10205 }
10206
10207 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10208 in both M1 and M2 and the SUBREG is either paradoxical or
10209 represents the low part, permute the SUBREG and the AND and
10210 try again. */
10211 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10212 && ((mode_width
10213 >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10214#ifdef WORD_REGISTER_OPERATIONS
10215 || subreg_lowpart_p (XEXP (op0, 0))
10216#endif
10217 )
10218#ifndef WORD_REGISTER_OPERATIONS
10219 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10220 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10221 As originally written the upper bits have a defined value
10222 due to the AND operation. However, if we commute the AND
10223 inside the SUBREG then they no longer have defined values
10224 and the meaning of the code has been changed. */
10225 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10226 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10227#endif
10228 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10229 && mode_width <= HOST_BITS_PER_WIDE_INT
10230 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10231 <= HOST_BITS_PER_WIDE_INT)
10232 && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10233 && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10234 & INTVAL (XEXP (op0, 1)))
10235 && INTVAL (XEXP (op0, 1)) != mask
10236 && (INTVAL (XEXP (op0, 1))
10237 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10238
10239 {
10240 op0
10241 = gen_lowpart_for_combine
10242 (mode,
10243 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10244 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10245 continue;
10246 }
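 /* Illustrative example of the SUBREG/AND permutation above
    (hypothetical operands, on a WORD_REGISTER_OPERATIONS target):
    (and:SI (subreg:SI (reg:QI R) 0) (const_int 0x7f)) is rewritten as
    the low part of (and:QI R (const_int 0x7f)), since 0x7f fits in
    both modes; the loop can then retry the comparison in QImode.  */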
10247
9487 break;
9488
9489 case ASHIFT:
9490 /* If we have (compare (ashift FOO N) (const_int C)) and
9491 the high order N bits of FOO (N+1 if an inequality comparison)
9492 are known to be zero, we can do this by comparing FOO with C
9493 shifted right N bits so long as the low-order N bits of C are
9494 zero. */
9495 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9496 && INTVAL (XEXP (op0, 1)) >= 0
9497 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
9498 < HOST_BITS_PER_WIDE_INT)
9499 && ((const_op
9500 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
9501 && mode_width <= HOST_BITS_PER_WIDE_INT
9502 && (nonzero_bits (XEXP (op0, 0), mode)
9503 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9504 + ! equality_comparison_p))) == 0)
9505 {
9506 const_op >>= INTVAL (XEXP (op0, 1));
9507 op1 = GEN_INT (const_op);
9508 op0 = XEXP (op0, 0);
9509 continue;
9510 }
9511
9512 /* If we are doing a sign bit comparison, it means we are testing
9513 a particular bit. Convert it to the appropriate AND. */
9514 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9515 && mode_width <= HOST_BITS_PER_WIDE_INT)
9516 {
9517 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9518 ((HOST_WIDE_INT) 1
9519 << (mode_width - 1
9520 - INTVAL (XEXP (op0, 1)))));
9521 code = (code == LT ? NE : EQ);
9522 continue;
9523 }
9524
9525 /* If this is an equality comparison with zero and we are shifting
9526 the low bit to the sign bit, we can convert this to an AND of the
9527 low-order bit. */
9528 if (const_op == 0 && equality_comparison_p
9529 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9530 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9531 {
9532 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9533 (HOST_WIDE_INT) 1);
9534 continue;
9535 }
9536 break;
9537
9538 case ASHIFTRT:
9539 /* If this is an equality comparison with zero, we can do this
9540 as a logical shift, which might be much simpler. */
9541 if (equality_comparison_p && const_op == 0
9542 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9543 {
9544 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9545 XEXP (op0, 0),
9546 INTVAL (XEXP (op0, 1)));
9547 continue;
9548 }
9549
9550 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9551 do the comparison in a narrower mode. */
9552 if (! unsigned_comparison_p
9553 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9554 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9555 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9556 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
9557 MODE_INT, 1)) != BLKmode
9558 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9559 || ((unsigned HOST_WIDE_INT) - const_op
9560 <= GET_MODE_MASK (tmode))))
9561 {
9562 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9563 continue;
9564 }
9565
10248 break;
10249
10250 case ASHIFT:
10251 /* If we have (compare (ashift FOO N) (const_int C)) and
10252 the high order N bits of FOO (N+1 if an inequality comparison)
10253 are known to be zero, we can do this by comparing FOO with C
10254 shifted right N bits so long as the low-order N bits of C are
10255 zero. */
10256 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10257 && INTVAL (XEXP (op0, 1)) >= 0
10258 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10259 < HOST_BITS_PER_WIDE_INT)
10260 && ((const_op
10261 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10262 && mode_width <= HOST_BITS_PER_WIDE_INT
10263 && (nonzero_bits (XEXP (op0, 0), mode)
10264 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10265 + ! equality_comparison_p))) == 0)
10266 {
10267 const_op >>= INTVAL (XEXP (op0, 1));
10268 op1 = GEN_INT (const_op);
10269 op0 = XEXP (op0, 0);
10270 continue;
10271 }
10272
10273 /* If we are doing a sign bit comparison, it means we are testing
10274 a particular bit. Convert it to the appropriate AND. */
10275 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10276 && mode_width <= HOST_BITS_PER_WIDE_INT)
10277 {
10278 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10279 ((HOST_WIDE_INT) 1
10280 << (mode_width - 1
10281 - INTVAL (XEXP (op0, 1)))));
10282 code = (code == LT ? NE : EQ);
10283 continue;
10284 }
10285
10286 /* If this is an equality comparison with zero and we are shifting
10287 the low bit to the sign bit, we can convert this to an AND of the
10288 low-order bit. */
10289 if (const_op == 0 && equality_comparison_p
10290 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10291 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10292 {
10293 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10294 (HOST_WIDE_INT) 1);
10295 continue;
10296 }
10297 break;
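 /* Worked examples for the ASHIFT rules (hypothetical operands): when
    the high bits of X are known zero,
    (eq (ashift X (const_int 2)) (const_int 12)) becomes
    (eq X (const_int 3)), the low two bits of 12 being zero.  And in a
    32-bit mode, (eq (ashift X (const_int 31)) (const_int 0)) depends
    only on bit 0 of X, so it is rewritten as a comparison of
    (and X (const_int 1)) with zero.  */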
10298
10299 case ASHIFTRT:
10300 /* If this is an equality comparison with zero, we can do this
10301 as a logical shift, which might be much simpler. */
10302 if (equality_comparison_p && const_op == 0
10303 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10304 {
10305 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10306 XEXP (op0, 0),
10307 INTVAL (XEXP (op0, 1)));
10308 continue;
10309 }
10310
10311 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10312 do the comparison in a narrower mode. */
10313 if (! unsigned_comparison_p
10314 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10315 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10316 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10317 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10318 MODE_INT, 1)) != BLKmode
10319 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10320 || ((unsigned HOST_WIDE_INT) - const_op
10321 <= GET_MODE_MASK (tmode))))
10322 {
10323 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10324 continue;
10325 }
10326
9566 /* ... fall through ... */
10327 /* ... fall through ... */
9567 case LSHIFTRT:
9568 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
9569 the low order N bits of FOO are known to be zero, we can do this
9570 by comparing FOO with C shifted left N bits so long as no
9571 overflow occurs. */
9572 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9573 && INTVAL (XEXP (op0, 1)) >= 0
9574 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9575 && mode_width <= HOST_BITS_PER_WIDE_INT
9576 && (nonzero_bits (XEXP (op0, 0), mode)
9577 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9578 && (const_op == 0
9579 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9580 < mode_width)))
9581 {
9582 const_op <<= INTVAL (XEXP (op0, 1));
9583 op1 = GEN_INT (const_op);
9584 op0 = XEXP (op0, 0);
9585 continue;
9586 }
9587
9588 /* If we are using this shift to extract just the sign bit, we
9589 can replace this with an LT or GE comparison. */
9590 if (const_op == 0
9591 && (equality_comparison_p || sign_bit_comparison_p)
9592 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9593 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9594 {
9595 op0 = XEXP (op0, 0);
9596 code = (code == NE || code == GT ? LT : GE);
9597 continue;
9598 }
9599 break;
10328 case LSHIFTRT:
10329 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10330 the low order N bits of FOO are known to be zero, we can do this
10331 by comparing FOO with C shifted left N bits so long as no
10332 overflow occurs. */
10333 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10334 && INTVAL (XEXP (op0, 1)) >= 0
10335 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10336 && mode_width <= HOST_BITS_PER_WIDE_INT
10337 && (nonzero_bits (XEXP (op0, 0), mode)
10338 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10339 && (const_op == 0
10340 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10341 < mode_width)))
10342 {
10343 const_op <<= INTVAL (XEXP (op0, 1));
10344 op1 = GEN_INT (const_op);
10345 op0 = XEXP (op0, 0);
10346 continue;
10347 }
10348
10349 /* If we are using this shift to extract just the sign bit, we
10350 can replace this with an LT or GE comparison. */
10351 if (const_op == 0
10352 && (equality_comparison_p || sign_bit_comparison_p)
10353 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10354 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10355 {
10356 op0 = XEXP (op0, 0);
10357 code = (code == NE || code == GT ? LT : GE);
10358 continue;
10359 }
10360 break;
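 /* Worked example for the shift sign-bit rule (hypothetical operands,
    32-bit mode): (lshiftrt X (const_int 31)) leaves only the sign bit
    of X, so (ne (lshiftrt X (const_int 31)) (const_int 0)) becomes
    (lt X (const_int 0)) and the EQ form becomes
    (ge X (const_int 0)).  */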
10361
10362 default:
10363 break;
9600 }
9601
9602 break;
9603 }
9604
9605 /* Now make any compound operations involved in this comparison. Then,
10364 }
10365
10366 break;
10367 }
10368
10369 /* Now make any compound operations involved in this comparison. Then,
9606 check for an outermost SUBREG on OP0 that isn't doing anything or is
10370 check for an outermost SUBREG on OP0 that is not doing anything or is
9607 paradoxical. The latter case can only occur when it is known that the
9608 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9609 We can never remove a SUBREG for a non-equality comparison because the
9610 sign bit is in a different place in the underlying object. */
9611
9612 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9613 op1 = make_compound_operation (op1, SET);
9614
9615 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9616 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9617 && (code == NE || code == EQ)
9618 && ((GET_MODE_SIZE (GET_MODE (op0))
9619 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9620 {
9621 op0 = SUBREG_REG (op0);
9622 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9623 }
9624
9625 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9626 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9627 && (code == NE || code == EQ)
9628 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9629 <= HOST_BITS_PER_WIDE_INT)
9630 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9631 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9632 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9633 op1),
9634 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9635 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9636 op0 = SUBREG_REG (op0), op1 = tem;
9637
9638 /* We now do the opposite procedure: Some machines don't have compare
9639 insns in all modes. If OP0's mode is an integer mode smaller than a
9640 word and we can't do a compare in that mode, see if there is a larger
9641 mode for which we can do the compare. There are a number of cases in
9642 which we can use the wider mode. */
9643
9644 mode = GET_MODE (op0);
9645 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9646 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9647 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9648 for (tmode = GET_MODE_WIDER_MODE (mode);
9649 (tmode != VOIDmode
9650 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9651 tmode = GET_MODE_WIDER_MODE (tmode))
9652 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9653 {
9654 /* If the only nonzero bits in OP0 and OP1 are those in the
9655 narrower mode and this is an equality or unsigned comparison,
9656 we can use the wider mode. Similarly for sign-extended
9657 values, in which case it is true for all comparisons. */
9658 if (((code == EQ || code == NE
9659 || code == GEU || code == GTU || code == LEU || code == LTU)
9660 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9661 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9662 || ((num_sign_bit_copies (op0, tmode)
9663 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9664 && (num_sign_bit_copies (op1, tmode)
9665 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9666 {
9667 op0 = gen_lowpart_for_combine (tmode, op0);
9668 op1 = gen_lowpart_for_combine (tmode, op1);
9669 break;
9670 }
9671
9672 /* If this is a test for negative, we can make an explicit
9673 test of the sign bit. */
9674
9675 if (op1 == const0_rtx && (code == LT || code == GE)
9676 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9677 {
9678 op0 = gen_binary (AND, tmode,
9679 gen_lowpart_for_combine (tmode, op0),
9680 GEN_INT ((HOST_WIDE_INT) 1
9681 << (GET_MODE_BITSIZE (mode) - 1)));
9682 code = (code == LT) ? NE : EQ;
9683 break;
9684 }
9685 }
9686
9687#ifdef CANONICALIZE_COMPARISON
9688 /* If this machine only supports a subset of valid comparisons, see if we
9689 can convert an unsupported one into a supported one. */
9690 CANONICALIZE_COMPARISON (code, op0, op1);
9691#endif
9692
9693 *pop0 = op0;
9694 *pop1 = op1;
9695
9696 return code;
9697}
9698
9699/* Return 1 if we know that X, a comparison operation, is not operating
9700 on a floating-point value or is EQ or NE, meaning that we can safely
9701 reverse it. */
9702
9703static int
9704reversible_comparison_p (x)
9705 rtx x;
9706{
9707 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9708 || flag_fast_math
9709 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9710 return 1;
9711
9712 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9713 {
9714 case MODE_INT:
9715 case MODE_PARTIAL_INT:
9716 case MODE_COMPLEX_INT:
9717 return 1;
9718
9719 case MODE_CC:
9720 /* If the mode of the condition codes tells us that this is safe,
9721 we need look no further. */
9722 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9723 return 1;
9724
9725 /* Otherwise try and find where the condition codes were last set and
9726 use that. */
9727 x = get_last_value (XEXP (x, 0));
9728 return (x && GET_CODE (x) == COMPARE
9729 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10371 paradoxical. The latter case can only occur when it is known that the
10372 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10373 We can never remove a SUBREG for a non-equality comparison because the
10374 sign bit is in a different place in the underlying object. */
10375
10376 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10377 op1 = make_compound_operation (op1, SET);
10378
10379 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10380 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10381 && (code == NE || code == EQ)
10382 && ((GET_MODE_SIZE (GET_MODE (op0))
10383 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10384 {
10385 op0 = SUBREG_REG (op0);
10386 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10387 }
10388
10389 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10390 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10391 && (code == NE || code == EQ)
10392 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10393 <= HOST_BITS_PER_WIDE_INT)
10394 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10395 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10396 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10397 op1),
10398 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10399 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10400 op0 = SUBREG_REG (op0), op1 = tem;
10401
10402 /* We now do the opposite procedure: Some machines don't have compare
10403 insns in all modes. If OP0's mode is an integer mode smaller than a
10404 word and we can't do a compare in that mode, see if there is a larger
10405 mode for which we can do the compare. There are a number of cases in
10406 which we can use the wider mode. */
10407
10408 mode = GET_MODE (op0);
10409 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10410 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10411 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10412 for (tmode = GET_MODE_WIDER_MODE (mode);
10413 (tmode != VOIDmode
10414 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10415 tmode = GET_MODE_WIDER_MODE (tmode))
10416 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10417 {
10418 /* If the only nonzero bits in OP0 and OP1 are those in the
10419 narrower mode and this is an equality or unsigned comparison,
10420 we can use the wider mode. Similarly for sign-extended
10421 values, in which case it is true for all comparisons. */
10422 if (((code == EQ || code == NE
10423 || code == GEU || code == GTU || code == LEU || code == LTU)
10424 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10425 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
10426 || ((num_sign_bit_copies (op0, tmode)
10427 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10428 && (num_sign_bit_copies (op1, tmode)
10429 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10430 {
10431 op0 = gen_lowpart_for_combine (tmode, op0);
10432 op1 = gen_lowpart_for_combine (tmode, op1);
10433 break;
10434 }
10435
10436 /* If this is a test for negative, we can make an explicit
10437 test of the sign bit. */
10438
10439 if (op1 == const0_rtx && (code == LT || code == GE)
10440 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10441 {
10442 op0 = gen_binary (AND, tmode,
10443 gen_lowpart_for_combine (tmode, op0),
10444 GEN_INT ((HOST_WIDE_INT) 1
10445 << (GET_MODE_BITSIZE (mode) - 1)));
10446 code = (code == LT) ? NE : EQ;
10447 break;
10448 }
10449 }
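 /* Illustrative example (hypothetical modes): on a target with no
    QImode compare insn, (lt X:QI (const_int 0)) can be widened to
    HImode as (ne (and:HI X' (const_int 0x80)) (const_int 0)), where X'
    is the HImode low part of X -- an explicit test of QImode's sign
    bit.  */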
10450
10451#ifdef CANONICALIZE_COMPARISON
10452 /* If this machine only supports a subset of valid comparisons, see if we
10453 can convert an unsupported one into a supported one. */
10454 CANONICALIZE_COMPARISON (code, op0, op1);
10455#endif
10456
10457 *pop0 = op0;
10458 *pop1 = op1;
10459
10460 return code;
10461}
10462
10463/* Return 1 if we know that X, a comparison operation, is not operating
10464 on a floating-point value or is EQ or NE, meaning that we can safely
10465 reverse it. */
10466
10467static int
10468reversible_comparison_p (x)
10469 rtx x;
10470{
10471 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
10472 || flag_fast_math
10473 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10474 return 1;
10475
10476 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10477 {
10478 case MODE_INT:
10479 case MODE_PARTIAL_INT:
10480 case MODE_COMPLEX_INT:
10481 return 1;
10482
10483 case MODE_CC:
10484 /* If the mode of the condition codes tells us that this is safe,
10485 we need look no further. */
10486 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10487 return 1;
10488
10489 /* Otherwise try and find where the condition codes were last set and
10490 use that. */
10491 x = get_last_value (XEXP (x, 0));
10492 return (x && GET_CODE (x) == COMPARE
10493 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10494
10495 default:
10496 return 0;
9730 }
10497 }
9731
9732 return 0;
9733}
9734
9735/* Utility function for the following routine. Called when X is part of a value
9736 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9737 for each register mentioned. Similar to mention_regs in cse.c */
9738
9739static void
9740update_table_tick (x)
9741 rtx x;
9742{
9743 register enum rtx_code code = GET_CODE (x);
9744 register char *fmt = GET_RTX_FORMAT (code);
9745 register int i;
9746
9747 if (code == REG)
9748 {
9749 int regno = REGNO (x);
9750 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9751 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9752
9753 for (i = regno; i < endregno; i++)
9754 reg_last_set_table_tick[i] = label_tick;
9755
9756 return;
9757 }
9758
9759 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9760 /* Note that we can't have an "E" in values stored; see
9761 get_last_value_validate. */
9762 if (fmt[i] == 'e')
9763 update_table_tick (XEXP (x, i));
9764}
9765
9766/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9767 are saying that the register is clobbered and we no longer know its
9768 value. If INSN is zero, don't update reg_last_set; this is only permitted
9769 with VALUE also zero and is used to invalidate the register. */
9770
9771static void
9772record_value_for_reg (reg, insn, value)
9773 rtx reg;
9774 rtx insn;
9775 rtx value;
9776{
9777 int regno = REGNO (reg);
9778 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9779 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9780 int i;
9781
9782 /* If VALUE contains REG and we have a previous value for REG, substitute
9783 the previous value. */
9784 if (value && insn && reg_overlap_mentioned_p (reg, value))
9785 {
9786 rtx tem;
9787
9788 /* Set things up so get_last_value is allowed to see anything set up to
9789 our insn. */
9790 subst_low_cuid = INSN_CUID (insn);
9791 tem = get_last_value (reg);
9792
9793 if (tem)
9794 value = replace_rtx (copy_rtx (value), reg, tem);
9795 }
9796
9797 /* For each register modified, show we don't know its value, that
9798 we don't know about its bitwise content, that its value has been
9799 updated, and that we don't know the location of the death of the
9800 register. */
9801 for (i = regno; i < endregno; i ++)
9802 {
9803 if (insn)
9804 reg_last_set[i] = insn;
9805 reg_last_set_value[i] = 0;
9806 reg_last_set_mode[i] = 0;
9807 reg_last_set_nonzero_bits[i] = 0;
9808 reg_last_set_sign_bit_copies[i] = 0;
9809 reg_last_death[i] = 0;
9810 }
9811
9812 /* Mark registers that are being referenced in this value. */
9813 if (value)
9814 update_table_tick (value);
9815
9816 /* Now update the status of each register being set.
9817 If someone is using this register in this block, set this register
9818 to invalid since we will get confused between the two lives in this
9819 basic block. This makes using this register always invalid. In cse, we
9820 scan the table to invalidate all entries using this register, but this
9821 is too much work for us. */
9822
9823 for (i = regno; i < endregno; i++)
9824 {
9825 reg_last_set_label[i] = label_tick;
9826 if (value && reg_last_set_table_tick[i] == label_tick)
9827 reg_last_set_invalid[i] = 1;
9828 else
9829 reg_last_set_invalid[i] = 0;
9830 }
9831
9832 /* The value being assigned might refer to X (like in "x++;"). In that
9833 case, we must replace it with (clobber (const_int 0)) to prevent
9834 infinite loops. */
10498}
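/* Illustrative note: the IEEE check above matters because with NaNs
   (lt A B) and (ge A B) can both be false, so among floating-point
   comparisons only EQ and NE are unconditionally safe to reverse.  */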
10499
10500/* Utility function for the following routine. Called when X is part of a value
10501 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10502 for each register mentioned. Similar to mention_regs in cse.c */
10503
10504static void
10505update_table_tick (x)
10506 rtx x;
10507{
10508 register enum rtx_code code = GET_CODE (x);
10509 register char *fmt = GET_RTX_FORMAT (code);
10510 register int i;
10511
10512 if (code == REG)
10513 {
10514 int regno = REGNO (x);
10515 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10516 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10517
10518 for (i = regno; i < endregno; i++)
10519 reg_last_set_table_tick[i] = label_tick;
10520
10521 return;
10522 }
10523
10524 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10525 /* Note that we can't have an "E" in values stored; see
10526 get_last_value_validate. */
10527 if (fmt[i] == 'e')
10528 update_table_tick (XEXP (x, i));
10529}
10530
10531/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
10532 are saying that the register is clobbered and we no longer know its
10533 value. If INSN is zero, don't update reg_last_set; this is only permitted
10534 with VALUE also zero and is used to invalidate the register. */
10535
10536static void
10537record_value_for_reg (reg, insn, value)
10538 rtx reg;
10539 rtx insn;
10540 rtx value;
10541{
10542 int regno = REGNO (reg);
10543 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10544 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10545 int i;
10546
10547 /* If VALUE contains REG and we have a previous value for REG, substitute
10548 the previous value. */
10549 if (value && insn && reg_overlap_mentioned_p (reg, value))
10550 {
10551 rtx tem;
10552
10553 /* Set things up so get_last_value is allowed to see anything set up to
10554 our insn. */
10555 subst_low_cuid = INSN_CUID (insn);
10556 tem = get_last_value (reg);
10557
10558 if (tem)
10559 value = replace_rtx (copy_rtx (value), reg, tem);
10560 }
10561
10562 /* For each register modified, show we don't know its value, that
10563 we don't know about its bitwise content, that its value has been
10564 updated, and that we don't know the location of the death of the
10565 register. */
10566 for (i = regno; i < endregno; i ++)
10567 {
10568 if (insn)
10569 reg_last_set[i] = insn;
10570 reg_last_set_value[i] = 0;
10571 reg_last_set_mode[i] = 0;
10572 reg_last_set_nonzero_bits[i] = 0;
10573 reg_last_set_sign_bit_copies[i] = 0;
10574 reg_last_death[i] = 0;
10575 }
10576
10577 /* Mark registers that are being referenced in this value. */
10578 if (value)
10579 update_table_tick (value);
10580
10581 /* Now update the status of each register being set.
10582 If someone is using this register in this block, set this register
10583 to invalid since we will get confused between the two lives in this
10584 basic block. This makes using this register always invalid. In cse, we
10585 scan the table to invalidate all entries using this register, but this
10586 is too much work for us. */
10587
10588 for (i = regno; i < endregno; i++)
10589 {
10590 reg_last_set_label[i] = label_tick;
10591 if (value && reg_last_set_table_tick[i] == label_tick)
10592 reg_last_set_invalid[i] = 1;
10593 else
10594 reg_last_set_invalid[i] = 0;
10595 }
10596
10597 /* The value being assigned might refer to X (like in "x++;"). In that
10598 case, we must replace it with (clobber (const_int 0)) to prevent
10599 infinite loops. */
9835 if (value && ! get_last_value_validate (&value,
10600 if (value && ! get_last_value_validate (&value, insn,
9836 reg_last_set_label[regno], 0))
9837 {
9838 value = copy_rtx (value);
10601 reg_last_set_label[regno], 0))
10602 {
10603 value = copy_rtx (value);
9839 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
10604 if (! get_last_value_validate (&value, insn,
10605 reg_last_set_label[regno], 1))
9840 value = 0;
9841 }
9842
9843 /* For the main register being modified, update the value, the mode, the
9844 nonzero bits, and the number of sign bit copies. */
9845
9846 reg_last_set_value[regno] = value;
9847
9848 if (value)
9849 {
9850 subst_low_cuid = INSN_CUID (insn);
9851 reg_last_set_mode[regno] = GET_MODE (reg);
9852 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9853 reg_last_set_sign_bit_copies[regno]
9854 = num_sign_bit_copies (value, GET_MODE (reg));
9855 }
9856}
9857
9858/* Used for communication between the following two routines. */
9859static rtx record_dead_insn;
9860
9861/* Called via note_stores from record_dead_and_set_regs to handle one
9862 SET or CLOBBER in an insn. */
9863
9864static void
9865record_dead_and_set_regs_1 (dest, setter)
9866 rtx dest, setter;
9867{
9868 if (GET_CODE (dest) == SUBREG)
9869 dest = SUBREG_REG (dest);
9870
9871 if (GET_CODE (dest) == REG)
9872 {
9873 /* If we are setting the whole register, we know its value. Otherwise
9874 show that we don't know the value. We can handle SUBREG in
9875 some cases. */
9876 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9877 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9878 else if (GET_CODE (setter) == SET
9879 && GET_CODE (SET_DEST (setter)) == SUBREG
9880 && SUBREG_REG (SET_DEST (setter)) == dest
9881 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
9882 && subreg_lowpart_p (SET_DEST (setter)))
9883 record_value_for_reg (dest, record_dead_insn,
9884 gen_lowpart_for_combine (GET_MODE (dest),
9885 SET_SRC (setter)));
9886 else
9887 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9888 }
9889 else if (GET_CODE (dest) == MEM
9890 /* Ignore pushes, they clobber nothing. */
9891 && ! push_operand (dest, GET_MODE (dest)))
9892 mem_last_set = INSN_CUID (record_dead_insn);
9893}
9894
9895/* Update the records of when each REG was most recently set or killed
9896 for the things done by INSN. This is the last thing done in processing
9897 INSN in the combiner loop.
9898
9899 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9900 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9901 and also the similar information mem_last_set (which insn most recently
9902 modified memory) and last_call_cuid (which insn was the most recent
9903 subroutine call). */
9904
9905static void
9906record_dead_and_set_regs (insn)
9907 rtx insn;
9908{
9909 register rtx link;
9910 int i;
9911
9912 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9913 {
9914 if (REG_NOTE_KIND (link) == REG_DEAD
9915 && GET_CODE (XEXP (link, 0)) == REG)
9916 {
9917 int regno = REGNO (XEXP (link, 0));
9918 int endregno
9919 = regno + (regno < FIRST_PSEUDO_REGISTER
9920 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9921 : 1);
9922
9923 for (i = regno; i < endregno; i++)
9924 reg_last_death[i] = insn;
9925 }
9926 else if (REG_NOTE_KIND (link) == REG_INC)
9927 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9928 }
9929
9930 if (GET_CODE (insn) == CALL_INSN)
9931 {
9932 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9933 if (call_used_regs[i])
9934 {
9935 reg_last_set_value[i] = 0;
9936 reg_last_set_mode[i] = 0;
9937 reg_last_set_nonzero_bits[i] = 0;
9938 reg_last_set_sign_bit_copies[i] = 0;
9939 reg_last_death[i] = 0;
9940 }
9941
9942 last_call_cuid = mem_last_set = INSN_CUID (insn);
9943 }
9944
9945 record_dead_insn = insn;
9946 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9947}
9948
9949/* Utility routine for the following function. Verify that all the registers
9950 mentioned in *LOC are valid when *LOC was part of a value set when
9951 label_tick == TICK. Return 0 if some are not.
9952
9953 If REPLACE is non-zero, replace the invalid reference with
9954 (clobber (const_int 0)) and return 1. This replacement is useful because
9955 we often can get useful information about the form of a value (e.g., if
9956 it was produced by a shift that always produces -1 or 0) even though
9957 we don't know exactly what registers it was produced from. */
9958
9959static int
10606 value = 0;
10607 }
10608
10609 /* For the main register being modified, update the value, the mode, the
10610 nonzero bits, and the number of sign bit copies. */
10611
10612 reg_last_set_value[regno] = value;
10613
10614 if (value)
10615 {
10616 subst_low_cuid = INSN_CUID (insn);
10617 reg_last_set_mode[regno] = GET_MODE (reg);
10618 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
10619 reg_last_set_sign_bit_copies[regno]
10620 = num_sign_bit_copies (value, GET_MODE (reg));
10621 }
10622}
10623
10624/* Used for communication between the following two routines. */
10625static rtx record_dead_insn;
10626
10627/* Called via note_stores from record_dead_and_set_regs to handle one
10628 SET or CLOBBER in an insn. */
10629
10630static void
10631record_dead_and_set_regs_1 (dest, setter)
10632 rtx dest, setter;
10633{
10634 if (GET_CODE (dest) == SUBREG)
10635 dest = SUBREG_REG (dest);
10636
10637 if (GET_CODE (dest) == REG)
10638 {
10639 /* If we are setting the whole register, we know its value. Otherwise
10640 show that we don't know the value. We can handle SUBREG in
10641 some cases. */
10642 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10643 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10644 else if (GET_CODE (setter) == SET
10645 && GET_CODE (SET_DEST (setter)) == SUBREG
10646 && SUBREG_REG (SET_DEST (setter)) == dest
10647 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
10648 && subreg_lowpart_p (SET_DEST (setter)))
10649 record_value_for_reg (dest, record_dead_insn,
10650 gen_lowpart_for_combine (GET_MODE (dest),
10651 SET_SRC (setter)));
10652 else
10653 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
10654 }
10655 else if (GET_CODE (dest) == MEM
10656 /* Ignore pushes, they clobber nothing. */
10657 && ! push_operand (dest, GET_MODE (dest)))
10658 mem_last_set = INSN_CUID (record_dead_insn);
10659}
10660
10661/* Update the records of when each REG was most recently set or killed
10662 for the things done by INSN. This is the last thing done in processing
10663 INSN in the combiner loop.
10664
10665 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10666 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10667 and also the similar information mem_last_set (which insn most recently
10668 modified memory) and last_call_cuid (which insn was the most recent
10669 subroutine call). */
10670
10671static void
10672record_dead_and_set_regs (insn)
10673 rtx insn;
10674{
10675 register rtx link;
10676 int i;
10677
10678 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10679 {
10680 if (REG_NOTE_KIND (link) == REG_DEAD
10681 && GET_CODE (XEXP (link, 0)) == REG)
10682 {
10683 int regno = REGNO (XEXP (link, 0));
10684 int endregno
10685 = regno + (regno < FIRST_PSEUDO_REGISTER
10686 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10687 : 1);
10688
10689 for (i = regno; i < endregno; i++)
10690 reg_last_death[i] = insn;
10691 }
10692 else if (REG_NOTE_KIND (link) == REG_INC)
10693 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
10694 }
10695
10696 if (GET_CODE (insn) == CALL_INSN)
10697 {
10698 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10699 if (call_used_regs[i])
10700 {
10701 reg_last_set_value[i] = 0;
10702 reg_last_set_mode[i] = 0;
10703 reg_last_set_nonzero_bits[i] = 0;
10704 reg_last_set_sign_bit_copies[i] = 0;
10705 reg_last_death[i] = 0;
10706 }
10707
10708 last_call_cuid = mem_last_set = INSN_CUID (insn);
10709 }
10710
10711 record_dead_insn = insn;
10712 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10713}
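/* Several loops above expand a hard register in a wide mode into the
   consecutive registers it occupies (the regno..endregno idiom).  A
   sketch of that computation, assuming 4-byte words purely for
   illustration:  */
#include <stdio.h>

#define UNITS_PER_WORD 4   /* assumption for this example only */

static int hard_regno_nregs (int mode_size)
{
  return (mode_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}

int main (void)
{
  int regno = 6, mode_size = 8;   /* a DImode value on a 32-bit target */
  int endregno = regno + hard_regno_nregs (mode_size);
  int i;

  for (i = regno; i < endregno; i++)
    printf ("update reg %d\n", i);       /* regs 6 and 7 */
  return 0;
}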
10714
10715/* Utility routine for the following function. Verify that all the registers
10716 mentioned in *LOC are valid when *LOC was part of a value set when
10717 label_tick == TICK. Return 0 if some are not.
10718
10719 If REPLACE is non-zero, replace the invalid reference with
10720 (clobber (const_int 0)) and return 1. This replacement is useful because
10721 we can often get useful information about the form of a value (e.g., if
10722 it was produced by a shift that always produces -1 or 0) even though
10723 we don't know exactly what registers it was produced from. */
10724
10725static int
9960get_last_value_validate (loc, tick, replace)
10726get_last_value_validate (loc, insn, tick, replace)
9961 rtx *loc;
10727 rtx *loc;
10728 rtx insn;
9962 int tick;
9963 int replace;
9964{
9965 rtx x = *loc;
9966 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9967 int len = GET_RTX_LENGTH (GET_CODE (x));
9968 int i;
9969
9970 if (GET_CODE (x) == REG)
9971 {
9972 int regno = REGNO (x);
9973 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9974 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9975 int j;
9976
9977 for (j = regno; j < endregno; j++)
9978 if (reg_last_set_invalid[j]
9979 /* If this is a pseudo-register that was only set once, it is
9980 always valid. */
10729 int tick;
10730 int replace;
10731{
10732 rtx x = *loc;
10733 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
10734 int len = GET_RTX_LENGTH (GET_CODE (x));
10735 int i;
10736
10737 if (GET_CODE (x) == REG)
10738 {
10739 int regno = REGNO (x);
10740 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10741 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10742 int j;
10743
10744 for (j = regno; j < endregno; j++)
10745 if (reg_last_set_invalid[j]
10746 /* If this is a pseudo-register that was only set once, it is
10747 always valid. */
9981 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
10748 || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
9982 && reg_last_set_label[j] > tick))
9983 {
9984 if (replace)
10749 && reg_last_set_label[j] > tick))
10750 {
10751 if (replace)
9985 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
10752 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9986 return replace;
9987 }
9988
9989 return 1;
9990 }
10753 return replace;
10754 }
10755
10756 return 1;
10757 }
10758 /* If this is a memory reference, make sure that there were
10759 no stores after it that might have clobbered the value. We don't
10760 have alias info, so we assume any store invalidates it. */
10761 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
10762 && INSN_CUID (insn) <= mem_last_set)
10763 {
10764 if (replace)
10765 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
10766 return replace;
10767 }
9991
9992 for (i = 0; i < len; i++)
9993 if ((fmt[i] == 'e'
10768
10769 for (i = 0; i < len; i++)
10770 if ((fmt[i] == 'e'
9994 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
10771 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
9995 /* Don't bother with these. They shouldn't occur anyway. */
9996 || fmt[i] == 'E')
9997 return 0;
9998
9999 /* If we haven't found a reason for it to be invalid, it is valid. */
10000 return 1;
10001}
10002
10003/* Get the last value assigned to X, if known. Some registers
10004 in the value may be replaced with (clobber (const_int 0)) if their value
10005 is no longer known reliably. */
10006
10007static rtx
10008get_last_value (x)
10009 rtx x;
10010{
10011 int regno;
10012 rtx value;
10013
10014 /* If this is a non-paradoxical SUBREG, get the value of its operand and
10015 then convert it to the desired mode. If this is a paradoxical SUBREG,
10772 /* Don't bother with these. They shouldn't occur anyway. */
10773 || fmt[i] == 'E')
10774 return 0;
10775
10776 /* If we haven't found a reason for it to be invalid, it is valid. */
10777 return 1;
10778}
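/* Why the (clobber (const_int 0)) placeholder is worth keeping: the
   form of an expression can still bound its sign-bit copies even when
   an operand is unknown.  A standalone sketch with invented names,
   assuming a 32-bit mode:  */
#include <stdio.h>

/* (ashiftrt X C) has at least min (32, copies(X) + C) sign-bit
   copies, whatever X is.  */
static int sign_copies_ashiftrt (int inner_copies, int count)
{
  int c = inner_copies + count;
  return c > 32 ? 32 : c;
}

int main (void)
{
  /* Even with copies(X) == 1 (nothing known), a shift by 31 leaves a
     value that is all sign bits, i.e. 0 or -1.  */
  printf ("%d\n", sign_copies_ashiftrt (1, 31));   /* prints 32 */
  return 0;
}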
10779
10780/* Get the last value assigned to X, if known. Some registers
10781 in the value may be replaced with (clobber (const_int 0)) if their value
10782 is no longer known reliably. */
10783
10784static rtx
10785get_last_value (x)
10786 rtx x;
10787{
10788 int regno;
10789 rtx value;
10790
10791 /* If this is a non-paradoxical SUBREG, get the value of its operand and
10792 then convert it to the desired mode. If this is a paradoxical SUBREG,
10016 we cannot predict what values the "extra" bits might have. */
10793 we cannot predict what values the "extra" bits might have. */
10017 if (GET_CODE (x) == SUBREG
10018 && subreg_lowpart_p (x)
10019 && (GET_MODE_SIZE (GET_MODE (x))
10020 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10021 && (value = get_last_value (SUBREG_REG (x))) != 0)
10022 return gen_lowpart_for_combine (GET_MODE (x), value);
10023
10024 if (GET_CODE (x) != REG)
10025 return 0;
10026
10027 regno = REGNO (x);
10028 value = reg_last_set_value[regno];
10029
10794 if (GET_CODE (x) == SUBREG
10795 && subreg_lowpart_p (x)
10796 && (GET_MODE_SIZE (GET_MODE (x))
10797 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10798 && (value = get_last_value (SUBREG_REG (x))) != 0)
10799 return gen_lowpart_for_combine (GET_MODE (x), value);
10800
10801 if (GET_CODE (x) != REG)
10802 return 0;
10803
10804 regno = REGNO (x);
10805 value = reg_last_set_value[regno];
10806
10030 /* If we don't have a value or if it isn't for this basic block, return 0. */
10807 /* If we don't have a value or if it isn't for this basic block,
10808 return 0. */
10031
10032 if (value == 0
10809
10810 if (value == 0
10033 || (reg_n_sets[regno] != 1
10811 || (REG_N_SETS (regno) != 1
10034 && reg_last_set_label[regno] != label_tick))
10035 return 0;
10036
10037 /* If the value was set in a later insn than the ones we are processing,
10038 we can't use it even if the register was only set once, but make a quick
10039 check to see if the previous insn set it to something. This is commonly
10040 the case when the same pseudo is used by repeated insns.
10041
10042 This does not work if there exists an instruction which is temporarily
10043 not on the insn chain. */
10044
10045 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10046 {
10047 rtx insn, set;
10048
10049 /* We cannot do anything useful in this case, because there is
10050 an instruction which is not on the insn chain. */
10051 if (subst_prev_insn)
10052 return 0;
10053
10054 /* Skip over USE insns. They are not useful here, and they may have
10055 been made by combine, in which case they do not have an INSN_CUID
10056 value. We can't use prev_real_insn, because that would incorrectly
10057 take us backwards across labels. Skip over BARRIERs also, since
10058 they could have been made by combine. If we see one, we must be
10059 optimizing dead code, so it doesn't matter what we do. */
10060 for (insn = prev_nonnote_insn (subst_insn);
10061 insn && ((GET_CODE (insn) == INSN
10062 && GET_CODE (PATTERN (insn)) == USE)
10063 || GET_CODE (insn) == BARRIER
10064 || INSN_CUID (insn) >= subst_low_cuid);
10065 insn = prev_nonnote_insn (insn))
10066 ;
10067
10068 if (insn
10069 && (set = single_set (insn)) != 0
10070 && rtx_equal_p (SET_DEST (set), x))
10071 {
10072 value = SET_SRC (set);
10073
10074 /* Make sure that VALUE doesn't reference X. Replace any
10075 explicit references with a CLOBBER. If there are any remaining
10076 references (rare), don't use the value. */
10077
10078 if (reg_mentioned_p (x, value))
10079 value = replace_rtx (copy_rtx (value), x,
10812 && reg_last_set_label[regno] != label_tick))
10813 return 0;
10814
10815 /* If the value was set in a later insn than the ones we are processing,
10816 we can't use it even if the register was only set once, but make a quick
10817 check to see if the previous insn set it to something. This is commonly
10818 the case when the same pseudo is used by repeated insns.
10819
10820 This does not work if there exists an instruction which is temporarily
10821 not on the insn chain. */
10822
10823 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
10824 {
10825 rtx insn, set;
10826
10827 /* We cannot do anything useful in this case, because there is
10828 an instruction which is not on the insn chain. */
10829 if (subst_prev_insn)
10830 return 0;
10831
10832 /* Skip over USE insns. They are not useful here, and they may have
10833 been made by combine, in which case they do not have an INSN_CUID
10834 value. We can't use prev_real_insn, because that would incorrectly
10835 take us backwards across labels. Skip over BARRIERs also, since
10836 they could have been made by combine. If we see one, we must be
10837 optimizing dead code, so it doesn't matter what we do. */
10838 for (insn = prev_nonnote_insn (subst_insn);
10839 insn && ((GET_CODE (insn) == INSN
10840 && GET_CODE (PATTERN (insn)) == USE)
10841 || GET_CODE (insn) == BARRIER
10842 || INSN_CUID (insn) >= subst_low_cuid);
10843 insn = prev_nonnote_insn (insn))
10844 ;
10845
10846 if (insn
10847 && (set = single_set (insn)) != 0
10848 && rtx_equal_p (SET_DEST (set), x))
10849 {
10850 value = SET_SRC (set);
10851
10852 /* Make sure that VALUE doesn't reference X. Replace any
10853 explicit references with a CLOBBER. If there are any remaining
10854 references (rare), don't use the value. */
10855
10856 if (reg_mentioned_p (x, value))
10857 value = replace_rtx (copy_rtx (value), x,
10080 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
10858 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));
10081
10082 if (reg_overlap_mentioned_p (x, value))
10083 return 0;
10084 }
10085 else
10086 return 0;
10087 }
10088
10089 /* If the value has all its registers valid, return it. */
10859
10860 if (reg_overlap_mentioned_p (x, value))
10861 return 0;
10862 }
10863 else
10864 return 0;
10865 }
10866
10867 /* If the value has all its registers valid, return it. */
10090 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
10868 if (get_last_value_validate (&value, reg_last_set[regno],
10869 reg_last_set_label[regno], 0))
10091 return value;
10092
10093 /* Otherwise, make a copy and replace any invalid register with
10094 (clobber (const_int 0)). If that fails for some reason, return 0. */
10095
10096 value = copy_rtx (value);
10870 return value;
10871
10872 /* Otherwise, make a copy and replace any invalid register with
10873 (clobber (const_int 0)). If that fails for some reason, return 0. */
10874
10875 value = copy_rtx (value);
10097 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
10876 if (get_last_value_validate (&value, reg_last_set[regno],
10877 reg_last_set_label[regno], 1))
10098 return value;
10099
10100 return 0;
10101}
10102
10103/* Return nonzero if expression X refers to a REG or to memory
10104 that is set in an instruction more recent than FROM_CUID. */
10105
10106static int
10107use_crosses_set_p (x, from_cuid)
10108 register rtx x;
10109 int from_cuid;
10110{
10111 register char *fmt;
10112 register int i;
10113 register enum rtx_code code = GET_CODE (x);
10114
10115 if (code == REG)
10116 {
10117 register int regno = REGNO (x);
10118 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10119 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10120
10121#ifdef PUSH_ROUNDING
10122 /* Don't allow uses of the stack pointer to be moved,
10123 because we don't know whether the move crosses a push insn. */
10124 if (regno == STACK_POINTER_REGNUM)
10125 return 1;
10126#endif
10127 for (; regno < endreg; regno++)
10128 if (reg_last_set[regno]
10129 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10130 return 1;
10131 return 0;
10132 }
10133
10134 if (code == MEM && mem_last_set > from_cuid)
10135 return 1;
10136
10137 fmt = GET_RTX_FORMAT (code);
10138
10139 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10140 {
10141 if (fmt[i] == 'E')
10142 {
10143 register int j;
10144 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10145 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10146 return 1;
10147 }
10148 else if (fmt[i] == 'e'
10149 && use_crosses_set_p (XEXP (x, i), from_cuid))
10150 return 1;
10151 }
10152 return 0;
10153}
10154
10155/* Define three variables used for communication between the following
10156 routines. */
10157
10158static int reg_dead_regno, reg_dead_endregno;
10159static int reg_dead_flag;
10160
10161/* Function called via note_stores from reg_dead_at_p.
10162
10163 If DEST is within [reg_dead_regno, reg_dead_endregno), set
10164 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
10165
10166static void
10167reg_dead_at_p_1 (dest, x)
10168 rtx dest;
10169 rtx x;
10170{
10171 int regno, endregno;
10172
10173 if (GET_CODE (dest) != REG)
10174 return;
10175
10176 regno = REGNO (dest);
10177 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10178 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10179
10180 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10181 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10182}
10183
10184/* Return non-zero if REG is known to be dead at INSN.
10185
10186 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
10187 referencing REG, it is dead. If we hit a SET referencing REG, it is
10188 live. Otherwise, see if it is live or dead at the start of the basic
10189 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
10190 must be assumed to be always live. */
10191
10192static int
10193reg_dead_at_p (reg, insn)
10194 rtx reg;
10195 rtx insn;
10196{
10197 int block, i;
10198
10199 /* Set variables for reg_dead_at_p_1. */
10200 reg_dead_regno = REGNO (reg);
10201 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10202 ? HARD_REGNO_NREGS (reg_dead_regno,
10203 GET_MODE (reg))
10204 : 1);
10205
10206 reg_dead_flag = 0;
10207
10208 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
10209 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10210 {
10211 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10212 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10213 return 0;
10214 }
10215
10216 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10217 beginning of function. */
10218 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
10219 insn = prev_nonnote_insn (insn))
10220 {
10221 note_stores (PATTERN (insn), reg_dead_at_p_1);
10222 if (reg_dead_flag)
10223 return reg_dead_flag == 1 ? 1 : 0;
10224
10225 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10226 return 1;
10227 }
10228
10229 /* Get the basic block number that we were in. */
10230 if (insn == 0)
10231 block = 0;
10232 else
10233 {
10234 for (block = 0; block < n_basic_blocks; block++)
10235 if (insn == basic_block_head[block])
10236 break;
10237
10238 if (block == n_basic_blocks)
10239 return 0;
10240 }
10241
10242 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10878 return value;
10879
10880 return 0;
10881}
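/* A toy model of the reg_last_set_value / label_tick bookkeeping that
   get_last_value relies on: a recorded value is trusted only if it
   was recorded under the current tick (the same basic block).  All
   names are invented for this sketch:  */
#include <stdio.h>

#define NREGS 8
static int last_value[NREGS], last_tick[NREGS], tick = 1;

static void record (int reg, int val)
{
  last_value[reg] = val;
  last_tick[reg] = tick;
}

static int lookup (int reg, int *val)
{
  if (last_tick[reg] != tick)
    return 0;                  /* stale: set in an earlier block */
  *val = last_value[reg];
  return 1;
}

int main (void)
{
  int v;
  record (3, 42);
  printf ("%d\n", lookup (3, &v));   /* prints 1: usable, v == 42 */
  tick++;                            /* a new basic block begins  */
  printf ("%d\n", lookup (3, &v));   /* prints 0: no longer valid */
  return 0;
}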
10882
10883/* Return nonzero if expression X refers to a REG or to memory
10884 that is set in an instruction more recent than FROM_CUID. */
10885
10886static int
10887use_crosses_set_p (x, from_cuid)
10888 register rtx x;
10889 int from_cuid;
10890{
10891 register char *fmt;
10892 register int i;
10893 register enum rtx_code code = GET_CODE (x);
10894
10895 if (code == REG)
10896 {
10897 register int regno = REGNO (x);
10898 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10899 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10900
10901#ifdef PUSH_ROUNDING
10902 /* Don't allow uses of the stack pointer to be moved,
10903 because we don't know whether the move crosses a push insn. */
10904 if (regno == STACK_POINTER_REGNUM)
10905 return 1;
10906#endif
10907 for (; regno < endreg; regno++)
10908 if (reg_last_set[regno]
10909 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10910 return 1;
10911 return 0;
10912 }
10913
10914 if (code == MEM && mem_last_set > from_cuid)
10915 return 1;
10916
10917 fmt = GET_RTX_FORMAT (code);
10918
10919 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10920 {
10921 if (fmt[i] == 'E')
10922 {
10923 register int j;
10924 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10925 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10926 return 1;
10927 }
10928 else if (fmt[i] == 'e'
10929 && use_crosses_set_p (XEXP (x, i), from_cuid))
10930 return 1;
10931 }
10932 return 0;
10933}
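/* The cuid comparison at the heart of use_crosses_set_p, modeled
   standalone: an expression captured at FROM_CUID must not be moved
   past a later set of one of its registers.  Invented names:  */
#include <stdio.h>

static int crosses_set (int last_set_cuid, int from_cuid)
{
  return last_set_cuid > from_cuid;
}

int main (void)
{
  /* Register set at insn 10; an expression built at insn 8 would see
     a different value if evaluated after insn 10.  */
  printf ("%d\n", crosses_set (10, 8));    /* prints 1: unsafe */
  printf ("%d\n", crosses_set (10, 12));   /* prints 0: safe   */
  return 0;
}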
10934
10935/* Define three variables used for communication between the following
10936 routines. */
10937
10938static int reg_dead_regno, reg_dead_endregno;
10939static int reg_dead_flag;
10940
10941/* Function called via note_stores from reg_dead_at_p.
10942
10943 If DEST is within [reg_dead_regno, reg_dead_endregno), set
10944 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
10945
10946static void
10947reg_dead_at_p_1 (dest, x)
10948 rtx dest;
10949 rtx x;
10950{
10951 int regno, endregno;
10952
10953 if (GET_CODE (dest) != REG)
10954 return;
10955
10956 regno = REGNO (dest);
10957 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10958 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10959
10960 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10961 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10962}
10963
10964/* Return non-zero if REG is known to be dead at INSN.
10965
10966 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
10967 referencing REG, it is dead. If we hit a SET referencing REG, it is
10968 live. Otherwise, see if it is live or dead at the start of the basic
10969 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
10970 must be assumed to be always live. */
10971
10972static int
10973reg_dead_at_p (reg, insn)
10974 rtx reg;
10975 rtx insn;
10976{
10977 int block, i;
10978
10979 /* Set variables for reg_dead_at_p_1. */
10980 reg_dead_regno = REGNO (reg);
10981 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
10982 ? HARD_REGNO_NREGS (reg_dead_regno,
10983 GET_MODE (reg))
10984 : 1);
10985
10986 reg_dead_flag = 0;
10987
10988 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
10989 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
10990 {
10991 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10992 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
10993 return 0;
10994 }
10995
10996 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
10997 beginning of function. */
10998 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
10999 insn = prev_nonnote_insn (insn))
11000 {
11001 note_stores (PATTERN (insn), reg_dead_at_p_1);
11002 if (reg_dead_flag)
11003 return reg_dead_flag == 1 ? 1 : 0;
11004
11005 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11006 return 1;
11007 }
11008
11009 /* Get the basic block number that we were in. */
11010 if (insn == 0)
11011 block = 0;
11012 else
11013 {
11014 for (block = 0; block < n_basic_blocks; block++)
11015 if (insn == basic_block_head[block])
11016 break;
11017
11018 if (block == n_basic_blocks)
11019 return 0;
11020 }
11021
11022 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10243 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10244 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
11023 if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
10245 return 0;
10246
10247 return 1;
10248}
10249
10250/* Note hard registers in X that are used. This code is similar to
10251 that in flow.c, but much simpler since we don't care about pseudos. */
10252
10253static void
10254mark_used_regs_combine (x)
10255 rtx x;
10256{
10257 register RTX_CODE code = GET_CODE (x);
10258 register int regno;
10259 int i;
10260
10261 switch (code)
10262 {
10263 case LABEL_REF:
10264 case SYMBOL_REF:
10265 case CONST_INT:
10266 case CONST:
10267 case CONST_DOUBLE:
10268 case PC:
10269 case ADDR_VEC:
10270 case ADDR_DIFF_VEC:
10271 case ASM_INPUT:
10272#ifdef HAVE_cc0
10273 /* CC0 must die in the insn after it is set, so we don't need to take
10274 special note of it here. */
10275 case CC0:
10276#endif
10277 return;
10278
10279 case CLOBBER:
10280 /* If we are clobbering a MEM, mark any hard registers inside the
10281 address as used. */
10282 if (GET_CODE (XEXP (x, 0)) == MEM)
10283 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10284 return;
10285
10286 case REG:
10287 regno = REGNO (x);
10288 /* A hard reg in a wide mode may really be multiple registers.
10289 If so, mark all of them just like the first. */
10290 if (regno < FIRST_PSEUDO_REGISTER)
10291 {
10292 /* None of this applies to the stack, frame or arg pointers. */
10293 if (regno == STACK_POINTER_REGNUM
10294#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10295 || regno == HARD_FRAME_POINTER_REGNUM
10296#endif
10297#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10298 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10299#endif
10300 || regno == FRAME_POINTER_REGNUM)
10301 return;
10302
10303 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10304 while (i-- > 0)
10305 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10306 }
10307 return;
10308
10309 case SET:
10310 {
10311 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10312 the address. */
10313 register rtx testreg = SET_DEST (x);
10314
10315 while (GET_CODE (testreg) == SUBREG
10316 || GET_CODE (testreg) == ZERO_EXTRACT
10317 || GET_CODE (testreg) == SIGN_EXTRACT
10318 || GET_CODE (testreg) == STRICT_LOW_PART)
10319 testreg = XEXP (testreg, 0);
10320
10321 if (GET_CODE (testreg) == MEM)
10322 mark_used_regs_combine (XEXP (testreg, 0));
10323
10324 mark_used_regs_combine (SET_SRC (x));
11024 return 0;
11025
11026 return 1;
11027}
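/* The backward scan of reg_dead_at_p as a standalone sketch: walking
   insns in reverse, a CLOBBER of the register means it was dead, a
   SET means it was live, and running off the block defers to the
   liveness data.  Structures here are invented for illustration:  */
#include <stdio.h>

enum action { NOTHING, SETS_REG, CLOBBERS_REG };

static int dead_at (const enum action *insns, int at, int live_at_start)
{
  int i;
  for (i = at; i >= 0; i--)
    {
      if (insns[i] == CLOBBERS_REG)
        return 1;              /* value was discarded: dead */
      if (insns[i] == SETS_REG)
        return 0;              /* value was produced: live  */
    }
  return ! live_at_start;
}

int main (void)
{
  enum action block[] = { SETS_REG, NOTHING, CLOBBERS_REG, NOTHING };
  printf ("%d\n", dead_at (block, 3, 1));   /* hits the CLOBBER: 1 */
  printf ("%d\n", dead_at (block, 1, 1));   /* hits the SET: 0     */
  return 0;
}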
11028
11029/* Note hard registers in X that are used. This code is similar to
11030 that in flow.c, but much simpler since we don't care about pseudos. */
11031
11032static void
11033mark_used_regs_combine (x)
11034 rtx x;
11035{
11036 register RTX_CODE code = GET_CODE (x);
11037 register int regno;
11038 int i;
11039
11040 switch (code)
11041 {
11042 case LABEL_REF:
11043 case SYMBOL_REF:
11044 case CONST_INT:
11045 case CONST:
11046 case CONST_DOUBLE:
11047 case PC:
11048 case ADDR_VEC:
11049 case ADDR_DIFF_VEC:
11050 case ASM_INPUT:
11051#ifdef HAVE_cc0
11052 /* CC0 must die in the insn after it is set, so we don't need to take
11053 special note of it here. */
11054 case CC0:
11055#endif
11056 return;
11057
11058 case CLOBBER:
11059 /* If we are clobbering a MEM, mark any hard registers inside the
11060 address as used. */
11061 if (GET_CODE (XEXP (x, 0)) == MEM)
11062 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11063 return;
11064
11065 case REG:
11066 regno = REGNO (x);
11067 /* A hard reg in a wide mode may really be multiple registers.
11068 If so, mark all of them just like the first. */
11069 if (regno < FIRST_PSEUDO_REGISTER)
11070 {
11071 /* None of this applies to the stack, frame or arg pointers. */
11072 if (regno == STACK_POINTER_REGNUM
11073#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11074 || regno == HARD_FRAME_POINTER_REGNUM
11075#endif
11076#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11077 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11078#endif
11079 || regno == FRAME_POINTER_REGNUM)
11080 return;
11081
11082 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
11083 while (i-- > 0)
11084 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
11085 }
11086 return;
11087
11088 case SET:
11089 {
11090 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11091 the address. */
11092 register rtx testreg = SET_DEST (x);
11093
11094 while (GET_CODE (testreg) == SUBREG
11095 || GET_CODE (testreg) == ZERO_EXTRACT
11096 || GET_CODE (testreg) == SIGN_EXTRACT
11097 || GET_CODE (testreg) == STRICT_LOW_PART)
11098 testreg = XEXP (testreg, 0);
11099
11100 if (GET_CODE (testreg) == MEM)
11101 mark_used_regs_combine (XEXP (testreg, 0));
11102
11103 mark_used_regs_combine (SET_SRC (x));
10325 return;
10326 }
11104 }
11105 return;
11106
11107 default:
11108 break;
10327 }
10328
10329 /* Recursively scan the operands of this expression. */
10330
10331 {
10332 register char *fmt = GET_RTX_FORMAT (code);
10333
10334 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10335 {
10336 if (fmt[i] == 'e')
10337 mark_used_regs_combine (XEXP (x, i));
10338 else if (fmt[i] == 'E')
10339 {
10340 register int j;
10341
10342 for (j = 0; j < XVECLEN (x, i); j++)
10343 mark_used_regs_combine (XVECEXP (x, i, j));
10344 }
10345 }
10346 }
10347}
10348
10349
10350/* Remove register number REGNO from the dead registers list of INSN.
10351
10352 Return the note used to record the death, if there was one. */
10353
10354rtx
10355remove_death (regno, insn)
10356 int regno;
10357 rtx insn;
10358{
10359 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10360
10361 if (note)
10362 {
11109 }
11110
11111 /* Recursively scan the operands of this expression. */
11112
11113 {
11114 register char *fmt = GET_RTX_FORMAT (code);
11115
11116 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11117 {
11118 if (fmt[i] == 'e')
11119 mark_used_regs_combine (XEXP (x, i));
11120 else if (fmt[i] == 'E')
11121 {
11122 register int j;
11123
11124 for (j = 0; j < XVECLEN (x, i); j++)
11125 mark_used_regs_combine (XVECEXP (x, i, j));
11126 }
11127 }
11128 }
11129}
11130
11131
11132/* Remove register number REGNO from the dead registers list of INSN.
11133
11134 Return the note used to record the death, if there was one. */
11135
11136rtx
11137remove_death (regno, insn)
11138 int regno;
11139 rtx insn;
11140{
11141 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11142
11143 if (note)
11144 {
10363 reg_n_deaths[regno]--;
11145 REG_N_DEATHS (regno)--;
10364 remove_note (insn, note);
10365 }
10366
10367 return note;
10368}
10369
10370/* For each register (hardware or pseudo) used within expression X, if its
10371 death is in an instruction with cuid between FROM_CUID (inclusive) and
10372 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10373 list headed by PNOTES.
10374
11146 remove_note (insn, note);
11147 }
11148
11149 return note;
11150}
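/* remove_death pairs note removal with a REG_N_DEATHS decrement so
   the per-register death count stays consistent with the note lists.
   A minimal standalone model of that invariant, with invented
   names:  */
#include <stdio.h>

#define NREGS 8
static int n_deaths[NREGS];
static int has_death_note[NREGS];    /* stands in for REG_NOTES */

static int remove_death_note (int regno)
{
  if (! has_death_note[regno])
    return 0;                        /* nothing to remove */
  has_death_note[regno] = 0;
  n_deaths[regno]--;                 /* keep the count in sync */
  return 1;
}

int main (void)
{
  has_death_note[5] = 1;
  n_deaths[5] = 1;
  remove_death_note (5);
  printf ("%d %d\n", has_death_note[5], n_deaths[5]);   /* prints 0 0 */
  return 0;
}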
11151
11152/* For each register (hardware or pseudo) used within expression X, if its
11153 death is in an instruction with cuid between FROM_CUID (inclusive) and
11154 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11155 list headed by PNOTES.
11156
11157 That said, don't move registers killed by maybe_kill_insn.
11158
10375 This is done when X is being merged by combination into TO_INSN. These
10376 notes will then be distributed as needed. */
10377
10378static void
11159 This is done when X is being merged by combination into TO_INSN. These
11160 notes will then be distributed as needed. */
11161
11162static void
10379move_deaths (x, from_cuid, to_insn, pnotes)
11163move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
10380 rtx x;
11164 rtx x;
11165 rtx maybe_kill_insn;
10381 int from_cuid;
10382 rtx to_insn;
10383 rtx *pnotes;
10384{
10385 register char *fmt;
10386 register int len, i;
10387 register enum rtx_code code = GET_CODE (x);
10388
10389 if (code == REG)
10390 {
10391 register int regno = REGNO (x);
10392 register rtx where_dead = reg_last_death[regno];
10393 register rtx before_dead, after_dead;
10394
11166 int from_cuid;
11167 rtx to_insn;
11168 rtx *pnotes;
11169{
11170 register char *fmt;
11171 register int len, i;
11172 register enum rtx_code code = GET_CODE (x);
11173
11174 if (code == REG)
11175 {
11176 register int regno = REGNO (x);
11177 register rtx where_dead = reg_last_death[regno];
11178 register rtx before_dead, after_dead;
11179
11180 /* Don't move the register if it gets killed in between FROM_CUID and TO_INSN. */
11181 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11182 && !reg_referenced_p (x, maybe_kill_insn))
11183 return;
11184
10395 /* WHERE_DEAD could be a USE insn made by combine, so first we
10396 make sure that we have insns with valid INSN_CUID values. */
10397 before_dead = where_dead;
10398 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
10399 before_dead = PREV_INSN (before_dead);
10400 after_dead = where_dead;
10401 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
10402 after_dead = NEXT_INSN (after_dead);
10403
10404 if (before_dead && after_dead
10405 && INSN_CUID (before_dead) >= from_cuid
10406 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
10407 || (where_dead != after_dead
10408 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
10409 {
10410 rtx note = remove_death (regno, where_dead);
10411
10412 /* It is possible for the call above to return 0. This can occur
10413 when reg_last_death points to I2 or I1 that we combined with.
10414 In that case make a new note.
10415
10416 We must also check for the case where X is a hard register
10417 and NOTE is a death note for a range of hard registers
10418 including X. In that case, we must put REG_DEAD notes for
10419 the remaining registers in place of NOTE. */
10420
10421 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10422 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11185 /* WHERE_DEAD could be a USE insn made by combine, so first we
11186 make sure that we have insns with valid INSN_CUID values. */
11187 before_dead = where_dead;
11188 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11189 before_dead = PREV_INSN (before_dead);
11190 after_dead = where_dead;
11191 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11192 after_dead = NEXT_INSN (after_dead);
11193
11194 if (before_dead && after_dead
11195 && INSN_CUID (before_dead) >= from_cuid
11196 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11197 || (where_dead != after_dead
11198 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11199 {
11200 rtx note = remove_death (regno, where_dead);
11201
11202 /* It is possible for the call above to return 0. This can occur
11203 when reg_last_death points to I2 or I1 that we combined with.
11204 In that case make a new note.
11205
11206 We must also check for the case where X is a hard register
11207 and NOTE is a death note for a range of hard registers
11208 including X. In that case, we must put REG_DEAD notes for
11209 the remaining registers in place of NOTE. */
11210
11211 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11212 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10423 != GET_MODE_SIZE (GET_MODE (x))))
11213 > GET_MODE_SIZE (GET_MODE (x))))
10424 {
10425 int deadregno = REGNO (XEXP (note, 0));
10426 int deadend
10427 = (deadregno + HARD_REGNO_NREGS (deadregno,
10428 GET_MODE (XEXP (note, 0))));
10429 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10430 int i;
10431
10432 for (i = deadregno; i < deadend; i++)
10433 if (i < regno || i >= ourend)
10434 REG_NOTES (where_dead)
11214 {
11215 int deadregno = REGNO (XEXP (note, 0));
11216 int deadend
11217 = (deadregno + HARD_REGNO_NREGS (deadregno,
11218 GET_MODE (XEXP (note, 0))));
11219 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11220 int i;
11221
11222 for (i = deadregno; i < deadend; i++)
11223 if (i < regno || i >= ourend)
11224 REG_NOTES (where_dead)
10435 = gen_rtx (EXPR_LIST, REG_DEAD,
10436 gen_rtx (REG, reg_raw_mode[i], i),
10437 REG_NOTES (where_dead));
11225 = gen_rtx_EXPR_LIST (REG_DEAD,
11226 gen_rtx_REG (reg_raw_mode[i], i),
11227 REG_NOTES (where_dead));
10438 }
11228 }
10439 /* If we didn't find any note, and we have a multi-reg hard
11229 /* If we didn't find any note, or if we found a REG_DEAD note that
11230 covers only part of the given reg, and we have a multi-reg hard
10440 register, then to be safe we must check for REG_DEAD notes
10441 for each register other than the first. They could have
10442 their own REG_DEAD notes lying around. */
11231 register, then to be safe we must check for REG_DEAD notes
11232 for each register other than the first. They could have
11233 their own REG_DEAD notes lying around. */
10443 else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
11234 else if ((note == 0
11235 || (note != 0
11236 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11237 < GET_MODE_SIZE (GET_MODE (x)))))
11238 && regno < FIRST_PSEUDO_REGISTER
10444 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
10445 {
10446 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11239 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11240 {
11241 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10447 int i;
11242 int i, offset;
10448 rtx oldnotes = 0;
10449
11243 rtx oldnotes = 0;
11244
10450 for (i = regno + 1; i < ourend; i++)
10451 move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
10452 from_cuid, to_insn, &oldnotes);
11245 if (note)
11246 offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11247 else
11248 offset = 1;
11249
11250 for (i = regno + offset; i < ourend; i++)
11251 move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11252 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
10453 }
10454
10455 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
10456 {
10457 XEXP (note, 1) = *pnotes;
10458 *pnotes = note;
10459 }
10460 else
11253 }
11254
11255 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11256 {
11257 XEXP (note, 1) = *pnotes;
11258 *pnotes = note;
11259 }
11260 else
10461 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
11261 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
10462
11262
10463 reg_n_deaths[regno]++;
11263 REG_N_DEATHS (regno)++;
10464 }
10465
10466 return;
10467 }
10468
10469 else if (GET_CODE (x) == SET)
10470 {
10471 rtx dest = SET_DEST (x);
10472
11264 }
11265
11266 return;
11267 }
11268
11269 else if (GET_CODE (x) == SET)
11270 {
11271 rtx dest = SET_DEST (x);
11272
10473 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
11273 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
10474
10475 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10476 that accesses one word of a multi-word item, some
10477 piece of every register in the expression is used by
10478 this insn, so remove any old death. */
10479
10480 if (GET_CODE (dest) == ZERO_EXTRACT
10481 || GET_CODE (dest) == STRICT_LOW_PART
10482 || (GET_CODE (dest) == SUBREG
10483 && (((GET_MODE_SIZE (GET_MODE (dest))
10484 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10485 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10486 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
10487 {
11274
11275 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11276 that accesses one word of a multi-word item, some
11277 piece of every register in the expression is used by
11278 this insn, so remove any old death. */
11279
11280 if (GET_CODE (dest) == ZERO_EXTRACT
11281 || GET_CODE (dest) == STRICT_LOW_PART
11282 || (GET_CODE (dest) == SUBREG
11283 && (((GET_MODE_SIZE (GET_MODE (dest))
11284 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11285 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11286 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11287 {
10488 move_deaths (dest, from_cuid, to_insn, pnotes);
11288 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
10489 return;
10490 }
10491
10492 /* If this is some other SUBREG, we know it replaces the entire
10493 value, so use that as the destination. */
10494 if (GET_CODE (dest) == SUBREG)
10495 dest = SUBREG_REG (dest);
10496
10497 /* If this is a MEM, adjust deaths of anything used in the address.
10498 For a REG (the only other possibility), the entire value is
10499 being replaced so the old value is not used in this insn. */
10500
10501 if (GET_CODE (dest) == MEM)
11289 return;
11290 }
11291
11292 /* If this is some other SUBREG, we know it replaces the entire
11293 value, so use that as the destination. */
11294 if (GET_CODE (dest) == SUBREG)
11295 dest = SUBREG_REG (dest);
11296
11297 /* If this is a MEM, adjust deaths of anything used in the address.
11298 For a REG (the only other possibility), the entire value is
11299 being replaced so the old value is not used in this insn. */
11300
11301 if (GET_CODE (dest) == MEM)
10502 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
11302 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11303 to_insn, pnotes);
10503 return;
10504 }
10505
10506 else if (GET_CODE (x) == CLOBBER)
10507 return;
10508
10509 len = GET_RTX_LENGTH (code);
10510 fmt = GET_RTX_FORMAT (code);
10511
10512 for (i = 0; i < len; i++)
10513 {
10514 if (fmt[i] == 'E')
10515 {
10516 register int j;
10517 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11304 return;
11305 }
11306
11307 else if (GET_CODE (x) == CLOBBER)
11308 return;
11309
11310 len = GET_RTX_LENGTH (code);
11311 fmt = GET_RTX_FORMAT (code);
11312
11313 for (i = 0; i < len; i++)
11314 {
11315 if (fmt[i] == 'E')
11316 {
11317 register int j;
11318 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10518 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
11319 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11320 to_insn, pnotes);
10519 }
10520 else if (fmt[i] == 'e')
11321 }
11322 else if (fmt[i] == 'e')
10521 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
11323 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
10522 }
10523}
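/* The note-threading idiom move_deaths uses: detach a note from its
   old insn and push it onto the PNOTES list for later distribution.
   A standalone sketch with a minimal invented note structure:  */
#include <stdio.h>
#include <stddef.h>

struct note { int regno; struct note *next; };

static void move_note (struct note **from, int regno, struct note **pnotes)
{
  struct note **p;
  for (p = from; *p; p = &(*p)->next)
    if ((*p)->regno == regno)
      {
        struct note *n = *p;
        *p = n->next;           /* unlink from the old insn   */
        n->next = *pnotes;      /* push onto the pending list */
        *pnotes = n;
        return;
      }
}

int main (void)
{
  struct note a = { 7, NULL }, b = { 3, &a };
  struct note *insn_notes = &b, *pending = NULL;

  move_note (&insn_notes, 7, &pending);
  printf ("%d %d\n", insn_notes->regno, pending->regno);   /* prints 3 7 */
  return 0;
}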
10524
10525/* Return 1 if X is the target of a bit-field assignment in BODY, the
10526 pattern of an insn. X must be a REG. */
10527
10528static int
10529reg_bitfield_target_p (x, body)
10530 rtx x;
10531 rtx body;
10532{
10533 int i;
10534
10535 if (GET_CODE (body) == SET)
10536 {
10537 rtx dest = SET_DEST (body);
10538 rtx target;
10539 int regno, tregno, endregno, endtregno;
10540
10541 if (GET_CODE (dest) == ZERO_EXTRACT)
10542 target = XEXP (dest, 0);
10543 else if (GET_CODE (dest) == STRICT_LOW_PART)
10544 target = SUBREG_REG (XEXP (dest, 0));
10545 else
10546 return 0;
10547
10548 if (GET_CODE (target) == SUBREG)
10549 target = SUBREG_REG (target);
10550
10551 if (GET_CODE (target) != REG)
10552 return 0;
10553
10554 tregno = REGNO (target), regno = REGNO (x);
10555 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10556 return target == x;
10557
10558 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10559 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10560
10561 return endregno > tregno && regno < endtregno;
10562 }
10563
10564 else if (GET_CODE (body) == PARALLEL)
10565 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
10566 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
10567 return 1;
10568
10569 return 0;
10570}
10571
10572/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10573 as appropriate. I3 and I2 are the insns resulting from the combination
10574 insns including FROM (I2 may be zero).
10575
10576 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10577 not need REG_DEAD notes because they are being substituted for. This
10578 saves searching in the most common cases.
10579
10580 Each note in the list is either ignored or placed on some insns, depending
10581 on the type of note. */
10582
10583static void
10584distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10585 rtx notes;
10586 rtx from_insn;
10587 rtx i3, i2;
10588 rtx elim_i2, elim_i1;
10589{
10590 rtx note, next_note;
10591 rtx tem;
10592
10593 for (note = notes; note; note = next_note)
10594 {
10595 rtx place = 0, place2 = 0;
10596
10597 /* If this NOTE references a pseudo register, ensure it references
10598 the latest copy of that register. */
10599 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10600 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10601 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10602
10603 next_note = XEXP (note, 1);
10604 switch (REG_NOTE_KIND (note))
10605 {
11324 }
11325}
11326
11327/* Return 1 if X is the target of a bit-field assignment in BODY, the
11328 pattern of an insn. X must be a REG. */
11329
11330static int
11331reg_bitfield_target_p (x, body)
11332 rtx x;
11333 rtx body;
11334{
11335 int i;
11336
11337 if (GET_CODE (body) == SET)
11338 {
11339 rtx dest = SET_DEST (body);
11340 rtx target;
11341 int regno, tregno, endregno, endtregno;
11342
11343 if (GET_CODE (dest) == ZERO_EXTRACT)
11344 target = XEXP (dest, 0);
11345 else if (GET_CODE (dest) == STRICT_LOW_PART)
11346 target = SUBREG_REG (XEXP (dest, 0));
11347 else
11348 return 0;
11349
11350 if (GET_CODE (target) == SUBREG)
11351 target = SUBREG_REG (target);
11352
11353 if (GET_CODE (target) != REG)
11354 return 0;
11355
11356 tregno = REGNO (target), regno = REGNO (x);
11357 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11358 return target == x;
11359
11360 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11361 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11362
11363 return endregno > tregno && regno < endtregno;
11364 }
11365
11366 else if (GET_CODE (body) == PARALLEL)
11367 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11368 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11369 return 1;
11370
11371 return 0;
11372}
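/* The half-open range overlap test reg_bitfield_target_p ends with
   (endregno > tregno && regno < endtregno), shown standalone:  */
#include <stdio.h>

/* Both intervals are [lo, hi).  */
static int ranges_overlap (int lo1, int hi1, int lo2, int hi2)
{
  return hi1 > lo2 && lo1 < hi2;
}

int main (void)
{
  printf ("%d\n", ranges_overlap (2, 4, 3, 6));   /* 1: reg 3 shared */
  printf ("%d\n", ranges_overlap (2, 4, 4, 6));   /* 0: disjoint     */
  return 0;
}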
11373
11374/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11375 as appropriate. I3 and I2 are the insns resulting from the combination
11376 insns including FROM (I2 may be zero).
11377
11378 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11379 not need REG_DEAD notes because they are being substituted for. This
11380 saves searching in the most common cases.
11381
11382 Each note in the list is either ignored or placed on some insns, depending
11383 on the type of note. */
11384
11385static void
11386distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11387 rtx notes;
11388 rtx from_insn;
11389 rtx i3, i2;
11390 rtx elim_i2, elim_i1;
11391{
11392 rtx note, next_note;
11393 rtx tem;
11394
11395 for (note = notes; note; note = next_note)
11396 {
11397 rtx place = 0, place2 = 0;
11398
11399 /* If this NOTE references a pseudo register, ensure it references
11400 the latest copy of that register. */
11401 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11402 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11403 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11404
11405 next_note = XEXP (note, 1);
11406 switch (REG_NOTE_KIND (note))
11407 {
11408 case REG_BR_PROB:
11409 case REG_EXEC_COUNT:
11410 /* Doesn't matter much where we put this, as long as it's somewhere.
11411 It is preferable to keep these notes on branches, which is most
11412 likely to be i3. */
11413 place = i3;
11414 break;
11415
10606 case REG_UNUSED:
10607 /* Any clobbers for i3 may still exist, and so we must process
10608 REG_UNUSED notes from that insn.
10609
10610 Any clobbers from i2 or i1 can only exist if they were added by
10611 recog_for_combine. In that case, recog_for_combine created the
10612 necessary REG_UNUSED notes. Trying to keep any original
10613 REG_UNUSED notes from these insns can cause incorrect output
10614 if it is for the same register as the original i3 dest.
10615 In that case, we will notice that the register is set in i3,
10616 and then add a REG_UNUSED note for the destination of i3, which
10617 is wrong. However, it is possible to have REG_UNUSED notes from
10618 i2 or i1 for registers which were both used and clobbered, so
10619 we keep notes from i2 or i1 if they will turn into REG_DEAD
10620 notes. */
10621
10622 /* If this register is set or clobbered in I3, put the note there
10623 unless there is one already. */
10624 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
10625 {
10626 if (from_insn != i3)
10627 break;
10628
10629 if (! (GET_CODE (XEXP (note, 0)) == REG
10630 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10631 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10632 place = i3;
10633 }
10634 /* Otherwise, if this register is used by I3, then this register
10635 now dies here, so we must put a REG_DEAD note here unless there
10636 is one already. */
10637 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10638 && ! (GET_CODE (XEXP (note, 0)) == REG
10639 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10640 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10641 {
10642 PUT_REG_NOTE_KIND (note, REG_DEAD);
10643 place = i3;
10644 }
10645 break;
10646
10647 case REG_EQUAL:
10648 case REG_EQUIV:
10649 case REG_NONNEG:
11416 case REG_UNUSED:
11417 /* Any clobbers for i3 may still exist, and so we must process
11418 REG_UNUSED notes from that insn.
11419
11420 Any clobbers from i2 or i1 can only exist if they were added by
11421 recog_for_combine. In that case, recog_for_combine created the
11422 necessary REG_UNUSED notes. Trying to keep any original
11423 REG_UNUSED notes from these insns can cause incorrect output
11424 if it is for the same register as the original i3 dest.
11425 In that case, we will notice that the register is set in i3,
11426 and then add a REG_UNUSED note for the destination of i3, which
11427 is wrong. However, it is possible to have REG_UNUSED notes from
11428 i2 or i1 for registers which were both used and clobbered, so
11429 we keep notes from i2 or i1 if they will turn into REG_DEAD
11430 notes. */
11431
11432 /* If this register is set or clobbered in I3, put the note there
11433 unless there is one already. */
11434 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11435 {
11436 if (from_insn != i3)
11437 break;
11438
11439 if (! (GET_CODE (XEXP (note, 0)) == REG
11440 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11441 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11442 place = i3;
11443 }
11444 /* Otherwise, if this register is used by I3, then this register
11445 now dies here, so we must put a REG_DEAD note here unless there
11446 is one already. */
11447 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11448 && ! (GET_CODE (XEXP (note, 0)) == REG
11449 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11450 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11451 {
11452 PUT_REG_NOTE_KIND (note, REG_DEAD);
11453 place = i3;
11454 }
11455 break;
11456
11457 case REG_EQUAL:
11458 case REG_EQUIV:
11459 case REG_NONNEG:
11460 case REG_NOALIAS:
10650 /* These notes say something about results of an insn. We can
10651 only support them if they used to be on I3 in which case they
10652 remain on I3. Otherwise they are ignored.
10653
10654 If the note refers to an expression that is not a constant, we
10655 must also ignore the note since we cannot tell whether the
10656 equivalence is still true. It might be possible to do
10657 slightly better than this (we only have a problem if I2DEST
10658 or I1DEST is present in the expression), but it doesn't
10659 seem worth the trouble. */
10660
10661 if (from_insn == i3
10662 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
10663 place = i3;
10664 break;
10665
10666 case REG_INC:
10667 case REG_NO_CONFLICT:
10668 case REG_LABEL:
10669 /* These notes say something about how a register is used. They must
10670 be present on any use of the register in I2 or I3. */
10671 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10672 place = i3;
10673
10674 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10675 {
10676 if (place)
10677 place2 = i2;
10678 else
10679 place = i2;
10680 }
10681 break;
10682
10683 case REG_WAS_0:
10684 /* It is too much trouble to try to see if this note is still
10685 correct in all situations. It is better to simply delete it. */
10686 break;
10687
10688 case REG_RETVAL:
10689 /* If the insn previously containing this note still exists,
10690 put it back where it was. Otherwise move it to the previous
10691 insn. Adjust the corresponding REG_LIBCALL note. */
10692 if (GET_CODE (from_insn) != NOTE)
10693 place = from_insn;
10694 else
10695 {
10696 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
10697 place = prev_real_insn (from_insn);
10698 if (tem && place)
10699 XEXP (tem, 0) = place;
10700 }
10701 break;
10702
10703 case REG_LIBCALL:
10704 /* This is handled similarly to REG_RETVAL. */
10705 if (GET_CODE (from_insn) != NOTE)
10706 place = from_insn;
10707 else
10708 {
10709 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
10710 place = next_real_insn (from_insn);
10711 if (tem && place)
10712 XEXP (tem, 0) = place;
10713 }
10714 break;
10715
10716 case REG_DEAD:
10717 /* If the register is used as an input in I3, it dies there.
10718 Similarly for I2, if it is non-zero and adjacent to I3.
10719
10720 If the register is not used as an input in either I3 or I2
10721 and it is not one of the registers we were supposed to eliminate,
10722 there are two possibilities. We might have a non-adjacent I2
10723 or we might have somehow eliminated an additional register
10724 from a computation. For example, we might have had A & B where
10725 we discover that B will always be zero. In this case we will
10726 eliminate the reference to A.
10727
10728 In both cases, we must search to see if we can find a previous
10729 use of A and put the death note there. */
10730
10731 if (from_insn
10732 && GET_CODE (from_insn) == CALL_INSN
10733 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
10734 place = from_insn;
10735 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
10736 place = i3;
10737 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10738 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10739 place = i2;
10740
10741 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10742 break;
10743
10744 /* If the register is used in both I2 and I3 and it dies in I3,
10745 we might have added another reference to it. If reg_n_refs
10746 was 2, bump it to 3. This has to be correct since the
10747 register must have been set somewhere. This is done
10748 because local-alloc.c treats 2 references as a
10749 special case. */
10750
10751 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
11461 /* These notes say something about results of an insn. We can
11462 only support them if they used to be on I3 in which case they
11463 remain on I3. Otherwise they are ignored.
11464
11465 If the note refers to an expression that is not a constant, we
11466 must also ignore the note since we cannot tell whether the
11467 equivalence is still true. It might be possible to do
11468 slightly better than this (we only have a problem if I2DEST
11469 or I1DEST is present in the expression), but it doesn't
11470 seem worth the trouble. */
11471
11472 if (from_insn == i3
11473 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
11474 place = i3;
11475 break;
11476
11477 case REG_INC:
11478 case REG_NO_CONFLICT:
11479 case REG_LABEL:
11480 /* These notes say something about how a register is used. They must
11481 be present on any use of the register in I2 or I3. */
11482 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11483 place = i3;
11484
11485 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11486 {
11487 if (place)
11488 place2 = i2;
11489 else
11490 place = i2;
11491 }
11492 break;
11493
11494 case REG_WAS_0:
11495 /* It is too much trouble to try to see if this note is still
11496 correct in all situations. It is better to simply delete it. */
11497 break;
11498
11499 case REG_RETVAL:
11500 /* If the insn previously containing this note still exists,
11501 put it back where it was. Otherwise move it to the previous
11502 insn. Adjust the corresponding REG_LIBCALL note. */
11503 if (GET_CODE (from_insn) != NOTE)
11504 place = from_insn;
11505 else
11506 {
11507 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
11508 place = prev_real_insn (from_insn);
11509 if (tem && place)
11510 XEXP (tem, 0) = place;
11511 }
11512 break;
11513
11514 case REG_LIBCALL:
11515 /* This is handled similarly to REG_RETVAL. */
11516 if (GET_CODE (from_insn) != NOTE)
11517 place = from_insn;
11518 else
11519 {
11520 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
11521 place = next_real_insn (from_insn);
11522 if (tem && place)
11523 XEXP (tem, 0) = place;
11524 }
11525 break;
11526
11527 case REG_DEAD:
11528 /* If the register is used as an input in I3, it dies there.
11529 Similarly for I2, if it is non-zero and adjacent to I3.
11530
11531 If the register is not used as an input in either I3 or I2
11532 and it is not one of the registers we were supposed to eliminate,
11533 there are two possibilities. We might have a non-adjacent I2
11534 or we might have somehow eliminated an additional register
11535 from a computation. For example, we might have had A & B where
11536 we discover that B will always be zero. In this case we will
11537 eliminate the reference to A.
11538
11539 In both cases, we must search to see if we can find a previous
11540 use of A and put the death note there. */
11541
11542 if (from_insn
11543 && GET_CODE (from_insn) == CALL_INSN
11544 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11545 place = from_insn;
11546 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
11547 place = i3;
11548 else if (i2 != 0 && next_nonnote_insn (i2) == i3
11549 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11550 place = i2;
11551
11552 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11553 break;
11554
11555 /* If the register is used in both I2 and I3 and it dies in I3,
11556 we might have added another reference to it. If reg_n_refs
11557 was 2, bump it to 3. This has to be correct since the
11558 register must have been set somewhere. This is done
11559 because local-alloc.c treats 2 references as a
11560 special case. */
11561
11562 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10752 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
11563 && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
10753 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11564 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10754 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
11565 REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
10755
10756 if (place == 0)
10757 {
10758 for (tem = prev_nonnote_insn (i3);
10759 place == 0 && tem
10760 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
10761 tem = prev_nonnote_insn (tem))
10762 {
10763 /* If the register is being set at TEM, see if that is all
10764 TEM is doing. If so, delete TEM. Otherwise, make this
10765 into a REG_UNUSED note instead. */
10766 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10767 {
10768 rtx set = single_set (tem);
11566
11567 if (place == 0)
11568 {
11569 for (tem = prev_nonnote_insn (i3);
11570 place == 0 && tem
11571 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
11572 tem = prev_nonnote_insn (tem))
11573 {
11574 /* If the register is being set at TEM, see if that is all
11575 TEM is doing. If so, delete TEM. Otherwise, make this
11576 into a REG_UNUSED note instead. */
11577 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11578 {
11579 rtx set = single_set (tem);
11580 rtx inner_dest = 0;
10769
11581
11582 if (set != 0)
11583 for (inner_dest = SET_DEST (set);
11584 GET_CODE (inner_dest) == STRICT_LOW_PART
11585 || GET_CODE (inner_dest) == SUBREG
11586 || GET_CODE (inner_dest) == ZERO_EXTRACT;
11587 inner_dest = XEXP (inner_dest, 0))
11588 ;
11589
10770 /* Verify that it was the set, and not a clobber that
10771 modified the register. */
10772
10773 if (set != 0 && ! side_effects_p (SET_SRC (set))
11590 /* Verify that it was the set, and not a clobber that
11591 modified the register. */
11592
11593 if (set != 0 && ! side_effects_p (SET_SRC (set))
10774 && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
10775 || (GET_CODE (SET_DEST (set)) == SUBREG
10776 && rtx_equal_p (XEXP (note, 0),
10777 XEXP (SET_DEST (set), 0)))))
11594 && rtx_equal_p (XEXP (note, 0), inner_dest))
10778 {
10779 /* Move the notes and links of TEM elsewhere.
10780 This might delete other dead insns recursively.
10781 First set the pattern to something that won't use
10782 any register. */
10783
10784 PATTERN (tem) = pc_rtx;
10785
10786 distribute_notes (REG_NOTES (tem), tem, tem,
10787 NULL_RTX, NULL_RTX, NULL_RTX);
10788 distribute_links (LOG_LINKS (tem));
10789
10790 PUT_CODE (tem, NOTE);
10791 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10792 NOTE_SOURCE_FILE (tem) = 0;
10793 }
11595 {
11596 /* Move the notes and links of TEM elsewhere.
11597 This might delete other dead insns recursively.
11598 First set the pattern to something that won't use
11599 any register. */
11600
11601 PATTERN (tem) = pc_rtx;
11602
11603 distribute_notes (REG_NOTES (tem), tem, tem,
11604 NULL_RTX, NULL_RTX, NULL_RTX);
11605 distribute_links (LOG_LINKS (tem));
11606
11607 PUT_CODE (tem, NOTE);
11608 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11609 NOTE_SOURCE_FILE (tem) = 0;
11610 }
11611 /* If the register is both set and used here, put the
11612 REG_DEAD note here, but place a REG_UNUSED note
11613 here too unless there already is one. */
11614 else if (reg_referenced_p (XEXP (note, 0),
11615 PATTERN (tem)))
11616 {
11617 place = tem;
11618
11619 if (! find_regno_note (tem, REG_UNUSED,
11620 REGNO (XEXP (note, 0))))
11621 REG_NOTES (tem)
11622 = gen_rtx_EXPR_LIST (REG_UNUSED,
11623 XEXP (note, 0),
11624 REG_NOTES (tem));
11625 }
11626 else
11627 {
11628 PUT_REG_NOTE_KIND (note, REG_UNUSED);
11629
11630 /* If there isn't already a REG_UNUSED note, put one
11631 here. */
11632 if (! find_regno_note (tem, REG_UNUSED,
11633 REGNO (XEXP (note, 0))))
11634 place = tem;
11635 break;
11636 }
11637 }
11638 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11639 || (GET_CODE (tem) == CALL_INSN
11640 && find_reg_fusage (tem, USE, XEXP (note, 0))))
11641 {
11642 place = tem;
11643
11644 /* If we are doing a 3->2 combination, and we have a
11645 register which formerly died in i3 and was not used
11646 by i2, which now no longer dies in i3 and is used in
11647 i2 but does not die in i2, and place is between i2
11648 and i3, then we may need to move a link from place to
11649 i2. */
11650 if (i2 && INSN_UID (place) <= max_uid_cuid
11651 && INSN_CUID (place) > INSN_CUID (i2)
11652 && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11653 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11654 {
11655 rtx links = LOG_LINKS (place);
11656 LOG_LINKS (place) = 0;
11657 distribute_links (links);
11658 }
11659 break;
11660 }
11661 }
11662
11663 /* If we haven't found an insn for the death note and it
11664 is still a REG_DEAD note, but we have hit a CODE_LABEL,
11665 insert a USE insn for the register at that label and
11666 	     put the death note there.  This prevents problems with
11667 call-state tracking in caller-save.c. */
11668 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
11669 {
11670 place
10839 = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
11671 = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
11672 				     tem);
11673 
11674 	      /* If this insn was emitted between blocks, then update
11675 		 basic_block_head of the current block to include it. */
11676 	      if (basic_block_end[this_basic_block - 1] == tem)
11677 		basic_block_head[this_basic_block] = place;
11678 	    }
11679 	}
11680 
10849       /* If the register is set or already dead at PLACE, we needn't do
10850 	 anything with this note if it is still a REG_DEAD note.
10851 
10852 	 Note that we cannot use just `dead_or_set_p' here since we can
10853 	 convert an assignment to a register into a bit-field assignment.
10854 	 Therefore, we must also omit the note if the register is the
10855 	 target of a bitfield assignment. */
10856 
11681       /* If the register is set or already dead at PLACE, we needn't do
11682 	 anything with this note if it is still a REG_DEAD note.
11683 	 We check here whether it is set at all, not whether it is totally
11684 	 replaced, which is what `dead_or_set_p' checks, so also check for
11685 	 it being set partially. */
11686 
11687 
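      /* Illustration (hypothetical RTL): a destination such as
	 (zero_extract (reg:SI 70) (const_int 8) (const_int 0)) stores into
	 only part of (reg:SI 70), so the register is set but not totally
	 replaced; reg_bitfield_target_p below catches exactly this case.  */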
11688 if (place && REG_NOTE_KIND (note) == REG_DEAD)
11689 {
11690 int regno = REGNO (XEXP (note, 0));
11691
11692 if (dead_or_set_p (place, XEXP (note, 0))
11693 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11694 {
11695 /* Unless the register previously died in PLACE, clear
11696 reg_last_death. [I no longer understand why this is
11697 being done.] */
11698 if (reg_last_death[regno] != place)
11699 reg_last_death[regno] = 0;
11700 place = 0;
11701 }
11702 else
11703 reg_last_death[regno] = place;
11704
11705 /* If this is a death note for a hard reg that is occupying
11706 multiple registers, ensure that we are still using all
11707 parts of the object. If we find a piece of the object
11708 that is unused, we must add a USE for that piece before
11709 PLACE and put the appropriate REG_DEAD note on it.
11710
11711 An alternative would be to put a REG_UNUSED for the pieces
11712 on the insn that set the register, but that can't be done if
11713 it is not in the same block. It is simpler, though less
11714 efficient, to add the USE insns. */
11715
11716 if (place && regno < FIRST_PSEUDO_REGISTER
11717 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11718 {
11719 int endregno
11720 = regno + HARD_REGNO_NREGS (regno,
11721 GET_MODE (XEXP (note, 0)));
11722 int all_used = 1;
11723 int i;
11724
11725 for (i = regno; i < endregno; i++)
11726 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11727 && ! find_regno_fusage (place, USE, i))
11728 {
10898 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
11729 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
11730 rtx p;
11731
11732 /* See if we already placed a USE note for this
11733 register in front of PLACE. */
11734 for (p = place;
11735 GET_CODE (PREV_INSN (p)) == INSN
11736 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11737 p = PREV_INSN (p))
11738 if (rtx_equal_p (piece,
11739 XEXP (PATTERN (PREV_INSN (p)), 0)))
11740 {
11741 p = 0;
11742 break;
11743 }
11744
11745 if (p)
11746 {
11747 rtx use_insn
10917 = emit_insn_before (gen_rtx (USE, VOIDmode,
10918 piece),
11748 = emit_insn_before (gen_rtx_USE (VOIDmode,
11749 piece),
11750 p);
11751 REG_NOTES (use_insn)
10921 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10922 REG_NOTES (use_insn));
11752 = gen_rtx_EXPR_LIST (REG_DEAD, piece,
11753 REG_NOTES (use_insn));
11754 }
11755
11756 all_used = 0;
11757 }
11758
11759 /* Check for the case where the register dying partially
11760 overlaps the register set by this insn. */
11761 if (all_used)
11762 for (i = regno; i < endregno; i++)
11763 if (dead_or_set_regno_p (place, i))
11764 {
11765 all_used = 0;
11766 break;
11767 }
11768
11769 if (! all_used)
11770 {
11771 /* Put only REG_DEAD notes for pieces that are
11772 still used and that are not already dead or set. */
11773
11774 for (i = regno; i < endregno; i++)
11775 {
10945 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
11776 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
11777
11778 if ((reg_referenced_p (piece, PATTERN (place))
11779 || (GET_CODE (place) == CALL_INSN
11780 && find_reg_fusage (place, USE, piece)))
11781 && ! dead_or_set_p (place, piece)
11782 && ! reg_bitfield_target_p (piece,
11783 PATTERN (place)))
10953 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10954 piece,
10955 REG_NOTES (place));
11784 REG_NOTES (place)
11785 = gen_rtx_EXPR_LIST (REG_DEAD,
11786 piece, REG_NOTES (place));
11787 }
11788
11789 place = 0;
11790 }
11791 }
11792 }
11793 break;
11794
11795 default:
11796 /* Any other notes should not be present at this point in the
11797 compilation. */
11798 abort ();
11799 }
11800
11801 if (place)
11802 {
11803 XEXP (note, 1) = REG_NOTES (place);
11804 REG_NOTES (place) = note;
11805 }
11806 else if ((REG_NOTE_KIND (note) == REG_DEAD
11807 || REG_NOTE_KIND (note) == REG_UNUSED)
11808 && GET_CODE (XEXP (note, 0)) == REG)
10978 reg_n_deaths[REGNO (XEXP (note, 0))]--;
11809 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
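	    /* The death count must track the notes actually present: it is
	       decremented here when the note is dropped entirely, and
	       incremented below when a copy of the note is also placed on
	       PLACE2.  */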
11810
11811 if (place2)
11812 {
11813 if ((REG_NOTE_KIND (note) == REG_DEAD
11814 || REG_NOTE_KIND (note) == REG_UNUSED)
11815 && GET_CODE (XEXP (note, 0)) == REG)
10985 reg_n_deaths[REGNO (XEXP (note, 0))]++;
11816 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
11817
10987 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10988 XEXP (note, 0), REG_NOTES (place2));
11818 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
11819 REG_NOTE_KIND (note),
11820 XEXP (note, 0),
11821 REG_NOTES (place2));
11822 }
11823 }
11824}
11825
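The note-threading idiom used just above (XEXP (note, 1) = REG_NOTES (place);
REG_NOTES (place) = note;) is a destructive push onto a singly linked list of
EXPR_LIST nodes.  A minimal standalone sketch of the same operation, with a
plain struct standing in for the rtx list nodes (all names here are
illustrative, not GCC's):

   #include <stdio.h>

   /* Stand-in for an EXPR_LIST note: a payload and a `next' link.  */
   struct note { const char *kind; struct note *next; };

   /* Splice N onto the front of *LIST, reusing the node itself, just as
      distribute_notes reuses the existing rtx instead of copying it.  */
   static void
   push_note (n, list)
        struct note *n;
        struct note **list;
   {
     n->next = *list;          /* XEXP (note, 1) = REG_NOTES (place); */
     *list = n;                /* REG_NOTES (place) = note; */
   }

   int
   main ()
   {
     struct note dead, unused;
     struct note *insn_notes = 0;
     struct note *p;

     dead.kind = "REG_DEAD";
     unused.kind = "REG_UNUSED";

     push_note (&dead, &insn_notes);
     push_note (&unused, &insn_notes);

     for (p = insn_notes; p != 0; p = p->next)
       printf ("%s\n", p->kind);   /* prints REG_UNUSED, then REG_DEAD */
     return 0;
   }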
11826/* Similarly to above, distribute the LOG_LINKS that used to be present on
11827 I3, I2, and I1 to new locations. This is also called in one case to
11828 add a link pointing at I3 when I3's destination is changed. */
11829
11830static void
11831distribute_links (links)
11832 rtx links;
11833{
11834 rtx link, next_link;
11835
11836 for (link = links; link; link = next_link)
11837 {
11838 rtx place = 0;
11839 rtx insn;
11840 rtx set, reg;
11841
11842 next_link = XEXP (link, 1);
11843
11844 /* If the insn that this link points to is a NOTE or isn't a single
11845 set, ignore it. In the latter case, it isn't clear what we
11846 can do other than ignore the link, since we can't tell which
11847 register it was for. Such links wouldn't be used by combine
11848 anyway.
11849
11850 It is not possible for the destination of the target of the link to
11851 	   have been changed by combine.  The only way that could happen is if we
11852 replace I3, I2, and I1 by I3 and I2. But in that case the
11853 destination of I2 also remains unchanged. */
11854
11855 if (GET_CODE (XEXP (link, 0)) == NOTE
11856 || (set = single_set (XEXP (link, 0))) == 0)
11857 continue;
11858
11859 reg = SET_DEST (set);
11860 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11861 || GET_CODE (reg) == SIGN_EXTRACT
11862 || GET_CODE (reg) == STRICT_LOW_PART)
11863 reg = XEXP (reg, 0);
11864
11865 /* A LOG_LINK is defined as being placed on the first insn that uses
11866 a register and points to the insn that sets the register. Start
11867 searching at the next insn after the target of the link and stop
11868 when we reach a set of the register or the end of the basic block.
11869
11870 Note that this correctly handles the link that used to point from
11871 I3 to I2. Also note that not much searching is typically done here
11872 since most links don't point very far away. */
11873
11874 for (insn = NEXT_INSN (XEXP (link, 0));
11875 (insn && (this_basic_block == n_basic_blocks - 1
11876 || basic_block_head[this_basic_block + 1] != insn));
11877 insn = NEXT_INSN (insn))
11878 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11879 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11880 {
11881 if (reg_referenced_p (reg, PATTERN (insn)))
11882 place = insn;
11883 break;
11884 }
11885 else if (GET_CODE (insn) == CALL_INSN
11886 && find_reg_fusage (insn, USE, reg))
11887 {
11888 place = insn;
11889 break;
11890 }
11891
11892 /* If we found a place to put the link, place it there unless there
11893 is already a link to the same insn as LINK at that point. */
11894
11895 if (place)
11896 {
11897 rtx link2;
11898
11899 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11900 if (XEXP (link2, 0) == XEXP (link, 0))
11901 break;
11902
11903 if (link2 == 0)
11904 {
11905 XEXP (link, 1) = LOG_LINKS (place);
11906 LOG_LINKS (place) = link;
11907
11908 /* Set added_links_insn to the earliest insn we added a
11909 link to. */
11910 if (added_links_insn == 0
11911 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11912 added_links_insn = place;
11913 }
11914 }
11915 }
11916}
11917
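The placement rule in distribute_links above amounts to: starting just after
the setting insn, scan forward within the basic block and give the link to the
first insn that mentions the register, but only if that mention is a use.  A
toy model of that rule (the struct and function names are invented for
illustration, not taken from GCC):

   #include <stdio.h>

   /* Toy insn: does it use or set the register we care about?  */
   struct toy_insn { int uses_reg; int sets_reg; };

   /* Return the index of the insn that should carry the LOG_LINK for a
      set at SETTER, or -1 if none: the first later insn mentioning the
      register gets it when that mention is a use (cf. the
      reg_overlap_mentioned_p / reg_referenced_p pair above).  */
   static int
   link_place (insns, n, setter)
        struct toy_insn *insns;
        int n, setter;
   {
     int i;
     for (i = setter + 1; i < n; i++)
       if (insns[i].uses_reg || insns[i].sets_reg)
         return insns[i].uses_reg ? i : -1;
     return -1;
   }

   int
   main ()
   {
     /* insn 0 sets the register; insn 2 is the first to use it.  */
     struct toy_insn block[4] = { {0, 1}, {0, 0}, {1, 0}, {0, 1} };
     printf ("link goes on insn %d\n", link_place (block, 4, 0));
     return 0;
   }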
11918/* Compute INSN_CUID for INSN, which is an insn made by combine. */
11919
11920static int
11921insn_cuid (insn)
11922 rtx insn;
11923{
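  /* Newly made insns have uids above max_uid_cuid and hence no cuid of
     their own; the USE insns emitted by distribute_notes are the ones
     expected here, so skip past them to a neighbouring insn that
     predates combine.  */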
11924 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11925 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11926 insn = NEXT_INSN (insn);
11927
11928 if (INSN_UID (insn) > max_uid_cuid)
11929 abort ();
11930
11931 return INSN_CUID (insn);
11932}
11933
11934void
11935dump_combine_stats (file)
11936 FILE *file;
11937{
11938 fprintf
11939 (file,
11940 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11941 combine_attempts, combine_merges, combine_extras, combine_successes);
11942}
11943
11944void
11945dump_combine_total_stats (file)
11946 FILE *file;
11947{
11948 fprintf
11949 (file,
11950 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11951 total_attempts, total_merges, total_extras, total_successes);
11952}
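Both dump routines only format counters into the given stdio stream, so any
FILE * will do; an illustrative call site (not taken from this file):

   dump_combine_total_stats (stderr);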